Commit 64a81d70 authored by dgasull

New dcinitializer

parent e8530d06
[submodule "events-model"]
path = events-model
url = https://gitlab.bsc.es/elastic-h2020/elastic-use-cases/object-detection-and-tracking/events-dataclay-model.git
[submodule "nfrtool-model"]
path = nfrtool-model
url = https://gitlab.bsc.es/elastic-h2020/elastic-sa/nfr-tool/dataclay.git
FROM bscdataclay/client:2.5.dev-alpine
FROM bscdataclay/client:alpine
ARG WORKING_DIR=/elastic
ARG PYNAMESPACE=ElasticNS
ARG JAVANAMESPACE=ElasticJavaNS
ARG FIWARENAMESPACE=FiwareNS
ARG DEFAULT_DATASET=ElasticDS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ARG DEFAULT_DATASET=defaultDS
ARG DEFAULT_USER=defaultUser
ARG DEFAULT_PASS=defaultPass
ARG CACHEBUST=1
ARG WORKING_DIR=/dataclay-initializer
ARG DC_SHARED_VOLUME=/srv/dataclay/shared
ARG LOGICMODULE_PORT_TCP
ARG LOGICMODULE_HOST
ARG JAVA_NAMESPACES
ARG PYTHON_NAMESPACES
ARG JAVA_MODELS_PATH
ARG PYTHON_MODELS_PATH
ARG IMPORT_MODELS_FROM_EXTERNAL_DC_HOSTS
ARG IMPORT_MODELS_FROM_EXTERNAL_DC_PORTS
ARG IMPORT_MODELS_FROM_EXTERNAL_DC_NAMESPACES
ENV WORKING_DIR=${WORKING_DIR} \
DC_SHARED_VOLUME=${DC_SHARED_VOLUME} \
DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
DATASET=${DEFAULT_DATASET} \
JAVANAMESPACE=${JAVANAMESPACE} \
FIWARENAMESPACE=${FIWARENAMESPACE} \
PYNAMESPACE=${PYNAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
JAVASTUBSPATH=${WORKING_DIR}/stubs/java_stubs \
PYSTUBSPATH=${WORKING_DIR}/stubs/python_stubs
WORKDIR ${WORKING_DIR}
# Install maven:
RUN apk --no-cache --update add maven
ENV DATACLAY_JAR=/home/dataclayusr/dataclay/dataclay.jar
# Copy files for connecting to dataClay
COPY ./cfgfiles ${WORKING_DIR}/cfgfiles
# Copy files
COPY initialize-dataclay.sh .
COPY health_check.sh .
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/client.properties \
DATASET=${DEFAULT_DATASET} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
DC_SHARED_VOLUME=${DC_SHARED_VOLUME} \
LOGICMODULE_HOST=${LOGICMODULE_HOST} \
LOGICMODULE_PORT_TCP=${LOGICMODULE_PORT_TCP} \
JAVA_MODELS_PATH=${JAVA_MODELS_PATH} \
PYTHON_MODELS_PATH=${PYTHON_MODELS_PATH} \
JAVA_NAMESPACES=${JAVA_NAMESPACES} \
PYTHON_NAMESPACES=${PYTHON_NAMESPACES} \
IMPORT_MODELS_FROM_EXTERNAL_DC_HOSTS=${IMPORT_MODELS_FROM_EXTERNAL_DC_HOSTS} \
IMPORT_MODELS_FROM_EXTERNAL_DC_PORTS=${IMPORT_MODELS_FROM_EXTERNAL_DC_PORTS} \
IMPORT_MODELS_FROM_EXTERNAL_DC_NAMESPACES=${IMPORT_MODELS_FROM_EXTERNAL_DC_NAMESPACES}
# Models
COPY ./nfrtool-model/model ${WORKING_DIR}/nfrtool-model/model
COPY ./events-model/model ${WORKING_DIR}/events-model/model
COPY ./fiware-model/model ${WORKING_DIR}/fiware-model/model
VOLUME ${DC_SHARED_VOLUME}
ENTRYPOINT ["./initialize-dataclay.sh"]
ENTRYPOINT ["sh","-x","./initialize-dataclay.sh"]
This repository contains the scripts needed to build dataClay Docker images with pre-registered models and stubs.
## Deploy
To deploy the ELASTIC dataClay images, run `deploy.sh` (typically needed whenever any of the models registered in dataClay change).
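A minimal invocation sketch (the version `2.0` below is only an example; the script prompts for the version and then asks for confirmation):

```sh
# run from the repository root; answer both prompts non-interactively
printf '2.0\ny\n' | ./deploy.sh
```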
## Structure
```
.
├── deploy_pyrequirements.sh: script to deploy an image with the dataClay deps (due to slow builds on ARM)
├── deploy.sh: deployment script
├── Dockerfile: Dockerfile used to register accounts, models, ...
├── elastic.dsjava.Dockerfile: elasticeuh2020/dataclay-dsjava image
├── elastic.dspython.Dockerfile: elasticeuh2020/dataclay-dspython image
├── elastic.javaclient.Dockerfile: elasticeuh2020/dataclay-javaclient image (with stubs)
├── elastic.logicmodule.Dockerfile: elasticeuh2020/dataclay-logicmodule image
├── elastic.pyclient.Dockerfile: elasticeuh2020/dataclay-pyclient image (with stubs)
├── elastic.pyrequirements.Dockerfile: elasticeuh2020/dataclay-pyclient with dataClay reqs (see note below)
├── events-model: events model
├── nfrtool-model: NFRTool dataClay model
├── cfgfiles: configuration files needed to register the models
│   ├── client.properties
│   ├── global.properties
│   ├── log4j2.xml
│   └── session.properties
├── dataclay: dataClay being extended
│   ├── docker-compose.yml
│   └── prop
│       ├── global.properties
│       └── log4j2.xml
├── README.md
└── tmp: temporary build files
```
## Note: slow build of dataClay dependencies in ARM 64
The Docker image `elasticeuh2020/dataclay-pyclient:${VERSION}-requirements` was created to avoid rebuilding the dataClay dependencies over and over, since PyPI repositories do not offer `arm64` wheels for the `grpcio` package (required by dataClay).
The image `elasticeuh2020/dataclay-pyclient:${VERSION}` extends the requirements image built beforehand.
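A hedged sketch of the resulting two-step build (tags taken from the note above; the exact build arguments live in `deploy_pyrequirements.sh` and `deploy.sh`, and the assumption here is that `elastic.pyclient.Dockerfile` is parameterized to extend the requirements image, as this note states):

```sh
# build the slow grpcio/dependency layer once...
docker build -f elastic.pyrequirements.Dockerfile -t elasticeuh2020/dataclay-pyclient:${VERSION}-requirements .
# ...then pyclient builds can start from that image instead of recompiling grpcio
docker build -f elastic.pyclient.Dockerfile -t elasticeuh2020/dataclay-pyclient:${VERSION} .
```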
## Acknowledgements
This work has been supported by the EU H2020 project ELASTIC, contract #825473.
#!/bin/bash
#===================================================================================
#
# FILE: deploy.sh
# FILE: build.sh
#
# USAGE: deploy.sh
# USAGE: build.sh
#
# DESCRIPTION: Deploy ELASTIC dataClay into DockerHub
# DESCRIPTION: Build ELASTIC dataClay images
#
# OPTIONS: --
# REQUIREMENTS: ---
@@ -15,126 +15,29 @@
# COMPANY: Barcelona Supercomputing Center (BSC)
# VERSION: 1.0
#===================================================================================
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
SUPPORTED_PYTHON_VERSIONS=(3.6 3.7 3.8)
echo "Welcome to ELASTIC-dataClay build script, please which version do you want to build?"
read VERSION
echo "Going to build:"
echo " -- elasticeuh2020/dataclay-pyclient:${VERSION} "
echo " -- elasticeuh2020/dataclay-javaclient:${VERSION} "
echo " -- elasticeuh2020/dataclay-logicmodule:${VERSION} "
echo " -- elasticeuh2020/dataclay-dspython:${VERSION} "
echo " -- elasticeuh2020/dataclay-dsjava:${VERSION} "
echo " -- elasticeuh2020/dcinitializer:${VERSION} "
echo " -- elasticeuh2020/kafka:${VERSION} "
echo " -- elasticeuh2020/mqtt-to-dataclay:${VERSION} "
read -r -p "Are you sure? [y/N] " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
then
SECONDS=0
## Build and start dataClay
pushd $SCRIPTDIR/dataclay
docker-compose kill
docker-compose down -v #sanity check
docker-compose up -d
popd
#
## BUILD ####
pushd $SCRIPTDIR
echo " ==== Cleaning previous images ==== "
docker images | grep elasticeuh2020 | awk '{print $3}' | xargs docker rmi -f
echo " ==== Building dataClay initializer ==== "
docker build --network dataclay_default --no-cache -t elasticeuh2020/dataclay-initializer .
echo " ==== Running dataClay initializer ==== "
rm -rf ./contractids
mkdir -p ./contractids
docker run --rm --network dataclay_default \
-v `pwd`/contractids:/contractids/:rw \
elasticeuh2020/dataclay-initializer
# create new images based on the running containers
echo " ===== Building docker elasticeuh2020/dataclay-dsjava:${VERSION} ====="
ELASTIC_DSJAVA_CONTAINER_ID=$(docker commit dataclay_dsjava_1)
docker tag $ELASTIC_DSJAVA_CONTAINER_ID elasticeuh2020/dataclay-dsjava:${VERSION}
echo " ===== Building docker elasticeuh2020/dataclay-logicmodule:${VERSION} ====="
# prepare to export (needed to use docker commit)
docker exec -t dataclay_logicmodule_1 /prepare_to_export.sh
ELASTIC_LOGICMODULE_CONTAINER_ID=$(docker commit dataclay_logicmodule_1)
docker tag $ELASTIC_LOGICMODULE_CONTAINER_ID elasticeuh2020/dataclay-logicmodule:${VERSION}
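# NOTE: docker commit snapshots the live containers' filesystems, so the
# resulting images already contain the registered accounts, models and contracts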
echo " ===== Retrieving execution classes into $SCRIPTDIR/tmp/deploy ====="
rm -rf $SCRIPTDIR/tmp/
mkdir -p $SCRIPTDIR/tmp/deploy
docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR/tmp/
echo " ===== Building docker elasticeuh2020/dataclay-javaclient:${VERSION} ====="
docker build --rm -f elastic.javaclient.Dockerfile --no-cache \
-t elasticeuh2020/dataclay-javaclient:$VERSION .
docker build --no-cache -t elasticeuh2020/dataclay-initializer:$VERSION .
echo " ===== Retrieving execution classes into $SCRIPTDIR/tmp/deploy and $SCRIPTDIR/tmp/execClasses ====="
rm -rf $SCRIPTDIR/tmp/
mkdir -p $SCRIPTDIR/tmp/deploy
docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR/tmp/
for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
DATACLAY_PYVER="${PYTHON_VERSION//./}"
IMG_VER=${VERSION}.py${DATACLAY_PYVER}
echo " ===== Building docker elasticeuh2020/dataclay-pyclient:${IMG_VER} ====="
docker build --rm -f elastic.pyclient.Dockerfile --no-cache \
--build-arg DATACLAY_PYVER="py${DATACLAY_PYVER}" \
--build-arg PYTHON_VERSION="$PYTHON_VERSION" \
-t elasticeuh2020/dataclay-pyclient:${IMG_VER} .
done
# default tag
docker tag elasticeuh2020/dataclay-pyclient:${VERSION}.py37 elasticeuh2020/dataclay-pyclient:${VERSION}
for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
DATACLAY_PYVER="${PYTHON_VERSION//./}"
IMG_VER=${VERSION}.py${DATACLAY_PYVER}
echo " ===== Building docker elasticeuh2020/dataclay-dspython:${IMG_VER} ====="
docker build --rm -f elastic.dspython.Dockerfile --no-cache \
--build-arg DATACLAY_PYVER="py${DATACLAY_PYVER}" \
-t elasticeuh2020/dataclay-dspython:${IMG_VER} .
done
# default tag
docker tag elasticeuh2020/dataclay-dspython:${VERSION}.py37 elasticeuh2020/dataclay-dspython:${VERSION}
echo " ===== Building docker elasticeuh2020/kafka:${VERSION} ====="
echo " ===== Building elasticeuh2020/kafka:${VERSION} ====="
docker build --rm -f kafka.Dockerfile -t elasticeuh2020/kafka:$VERSION .
echo " ===== Building docker elasticeuh2020/mqtt-to-dataclay:${VERSION} ====="
docker build --rm -f elastic.mqtt.bridge.Dockerfile \
--build-arg PYCLIENT_VERSION=${VERSION} \
-t elasticeuh2020/mqtt-to-dataclay:$VERSION .
rm -rf ./contractids
popd
#######################################################################################
echo " ===== Stopping dataClay ====="
pushd $SCRIPTDIR/dataclay
docker-compose kill
docker-compose down -v #sanity check
popd
echo " ===== Cleaning images ====="
docker rmi -f elasticeuh2020/dataclay-initializer
docker images | grep elasticeuh2020
duration=$SECONDS
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "ELASTIC dataClay deployment FINISHED! "
else
echo "Aborting"
fi
HOST=logicmodule
TCPPORT=11034
<?xml version="1.0" encoding="UTF-8"?>
<Configuration monitorInterval="60" status="off">
<Appenders>
<Console name="ConsoleAppender" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %p [%c] [%t] [%C{1}:%L] %m%n"></PatternLayout>
</Console>
</Appenders>
<Loggers>
<!-- Runtime -->
<Logger name="ClientRuntime" level="off" />
<Logger name="ClientManagementLib" level="off"/>
<Logger name="DataClayRuntime" level="off"/>
<Logger name="DataServiceRuntime" level="off"/>
<Logger name="DataClayObjectLoader" level="off"/>
<Logger name="DataClayObject" level="off" /> <!-- This is very verbose! -->
<!-- Data service -->
<Logger name="DataService" level="info"/>
<Logger name="ExecutionEnvironmentSrv" level="off"/>
<!-- Lockers -->
<Logger name="dataclay.util.classloaders.SyncClass" level="off"/>
<Logger name="dataclay.heap.LockerPool" level="off"/>
<Logger name="LockerPool" level="off"/>
<Logger name="dataclay.util.classloaders.ClassLockers" level="off"/>
<!-- Garbage collection -->
<Logger name="GlobalGC" level="off"/>
<Logger name="heap.HeapManager" level="off"/>
<Logger name="ReferenceCounting" level="off"/>
<Logger name="StorageLocation" level="off"/>
<!-- Logic module -->
<Logger name="LogicModule" level="info"/>
<Logger name="LMDB" level="off"/>
<Logger name="managers" level="off" />
<Logger name="MetaDataService.db" level="off" />
<Logger name="MetaDataService" level="off" />
<!-- Communication -->
<Logger name="io.grpc" level="off"/>
<Logger name="io.netty" level="off"/>
<Logger name="NettyClientHandler" level="off"/>
<Logger name="grpc.client" level="off"/>
<Logger name="communication.LogicModule.service" level="off"/>
<Logger name="grpc.client.logicmodule" level="off"/>
<Logger name="grpc.client.dataservice.DS1" level="off"/>
<!-- Databases -->
<Logger name="dataclay.dbhandler" level="off"/>
<Logger name="dbhandler.PostgresConnection" level="off" />
<Logger name="org.apache.commons.dbcp2" level="off"/>
<Logger name="PostgresHandler" level="off"/>
<Logger name="SQLHandler" level="off"/>
<!-- Misc -->
<Logger name="util" level="off" />
<Logger name="exceptions" level="off"/>
<Logger name="Paraver" level="info"/>
<Logger name="DataClaySerializationLib" level="off"/>
<Logger name="DataClayDeserializationLib" level="off"/>
<!-- ROOT LOGGER -->
<Root level="off">
<AppenderRef ref="ConsoleAppender" />
</Root>
</Loggers>
</Configuration>
Account=ElasticUser
Password=ElasticPass
DataSets=ElasticDS
DataSetForStore=ElasticDS
StubsClasspath=/elastic/stubs
version: '3.4'
services:
  logicmodule:
    image: "bscdataclay/logicmodule:2.5.dev-alpine"
    command: "${COMMAND_OPTS}"
    ports:
      - "11034:11034"
    environment:
      - LOGICMODULE_PORT_TCP=11034
      - LOGICMODULE_HOST=logicmodule
      - DATACLAY_ADMIN_USER=admin
      - DATACLAY_ADMIN_PASSWORD=admin
    stop_grace_period: 5m
    healthcheck:
      interval: 5s
      retries: 10
      test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
  dsjava:
    image: "bscdataclay/dsjava:2.5.dev-alpine"
    command: "${COMMAND_OPTS}"
    ports:
      - "2127:2127"
    depends_on:
      - logicmodule
    environment:
      - DATASERVICE_NAME=DS1
      - DATASERVICE_JAVA_PORT_TCP=2127
      - LOGICMODULE_PORT_TCP=11034
      - LOGICMODULE_HOST=logicmodule
    stop_grace_period: 5m
    healthcheck:
      interval: 5s
      retries: 10
      test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
  dspython:
    image: "bscdataclay/dspython:2.5.dev-alpine"
    command: "${COMMAND_OPTS}"
    ports:
      - "6867:6867"
    depends_on:
      - logicmodule
      - dsjava
    environment:
      - DATASERVICE_NAME=DS1
      - LOGICMODULE_PORT_TCP=11034
      - LOGICMODULE_HOST=logicmodule
      - DEBUG=False
    stop_grace_period: 5m
    healthcheck:
      interval: 5s
      retries: 10
      test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
FROM bscdataclay/dsjava:2.5.dev-alpine
COPY ./tmp/execClasses ${DATACLAY_HOME}/execClasses
# Execute
# Don't use CMD, in order to keep compatibility with the Singularity container generator
ENTRYPOINT ["dataclay-java-entry-point", "es.bsc.dataclay.dataservice.server.DataServiceSrv"]
ARG DATACLAY_PYVER=py37
FROM bscdataclay/dspython:2.5.${DATACLAY_PYVER}.dev-alpine
# Install packages:
RUN pip install kafka-python
COPY ./tmp/deploy ${DATACLAY_HOME}/deploy
# Execute
# Don't use CMD, in order to keep compatibility with the Singularity container generator
ENTRYPOINT ["dataclay-python-entry-point", "-m", "dataclay.executionenv.server"]
ARG JDK=8
FROM bscdataclay/logicmodule:2.5.jdk${JDK}.dev-alpine
FROM maven:3.6.1-jdk-8-alpine
ARG WORKING_DIR=/elastic
ARG DEFAULT_NAMESPACE=ElasticJavaNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ARG JDK=8
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
DATACLAY_JAR=${WORKING_DIR}/dataclay.jar \
STUBSPATH=${WORKING_DIR}/stubs \
STUBS_JAR=${WORKING_DIR}/stubs.jar
# Prepare
WORKDIR ${WORKING_DIR}
# Get dataClay
COPY --from=0 /home/dataclayusr/dataclay/dataclay.jar ${DATACLAY_JAR}
# Get initializers
COPY ./cfgfiles/ ${WORKING_DIR}/cfgfiles/
COPY entrypoints/javaclient /dataclay-client
# Run
ENTRYPOINT ["/dataclay-client/entrypoint.sh"]
# Provide command (java main class...)
CMD ["--version"]
FROM alpine:3
RUN apk --no-cache --update add sqlite
COPY ./tmp/LM.sqlite /tmp/dataclay/dump.sql
RUN mkdir -p "/dataclay/storage"
RUN sqlite3 "/dataclay/storage/LM" ".read /tmp/dataclay/dump.sql"
FROM bscdataclay/logicmodule:2.5.dev-alpine
COPY --from=0 /dataclay/storage/LM /dataclay/storage/LM
# The command can contain additional options for the Java Virtual Machine and
# must contain a class to be executed.
ENTRYPOINT ["dataclay-java-entry-point", "es.bsc.dataclay.logic.server.LogicModuleSrv"]
# Don't use CMD, in order to keep compatibility with the Singularity container generator
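# Hypothetical run sketch, per the note above (arguments after the image name
# are appended to the entry point):
#   docker run elasticeuh2020/dataclay-logicmodule:<version> -Xmx2G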
ARG PYCLIENT_VERSION
FROM elasticeuh2020/dataclay-pyclient:${PYCLIENT_VERSION}
FROM nuvlabox/mqtt-fiware-bridge:0.0.1
# Install packages:
RUN apt-get update && apt-get install --no-install-recommends -y build-essential gcc libc-dev && rm -rf /var/lib/apt/lists/*
# Install the latest dataClay version (the grpcio dependency goes in its own layer since it takes long to build)
RUN pip install --upgrade pip
RUN pip install grpcio==1.25.0
RUN pip install --pre --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple dataClay
# Get environment from arguments
ENV WORKING_DIR=/opt/nuvlabox
ARG DEFAULT_NAMESPACE=FiwareNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
STUBSPATH=/elastic/stubs/fiware
COPY --from=0 /dataclay-client/fiware-contractid /srv/dataclay/shared/contractid
# Copy app
COPY mqtt-to-dataclay/ ${WORKING_DIR}
RUN mkdir -p ${STUBSPATH}
# Run
ENTRYPOINT ["python","-u","mqtt_to_dataclay.py"]
ARG DATACLAY_PYVER
ARG PYTHON_VERSION
FROM bscdataclay/dspython:2.5.${DATACLAY_PYVER}.dev-alpine
FROM python:${PYTHON_VERSION}-alpine
ARG WORKING_DIR=/elastic
ARG DEFAULT_NAMESPACE=ElasticNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
STUBSPATH=${WORKING_DIR}/stubs
# Prepare
WORKDIR ${WORKING_DIR}
# =============== INSTALL DATACLAY =================== #
RUN apk add libstdc++
ENV DATACLAY_VIRTUAL_ENV=${WORKING_DIR}/dataclay_venv
COPY --from=0 /home/dataclayusr/dataclay/dataclay_venv ${DATACLAY_VIRTUAL_ENV}
ENV PATH="$DATACLAY_VIRTUAL_ENV/bin:$PATH"
# check dataclay is installed
RUN python --version
RUN python -c "import dataclay; print('import ok')"
# install mqtt
RUN python -m pip install paho-mqtt
# ================================== #
# Get dc initializer
COPY ./cfgfiles/ ${WORKING_DIR}/cfgfiles/
COPY entrypoints/pyclient /dataclay-client/
COPY ./contractids/events-contractid /dataclay-client/events-contractid
COPY ./contractids/fiware-contractid /dataclay-client/fiware-contractid
# Run
ENTRYPOINT ["/dataclay-client/entrypoint.sh"]
# Provide python command (main class...)
CMD ["--version"]
#!/bin/sh
# check if stubs exist
if [ ! -f "STUBS_JAR" ]; then
mkdir -p ${STUBSPATH}
java -cp $DATACLAY_JAR es.bsc.dataclay.tool.GetStubs ${USER} ${PASS} ${NAMESPACE} ${STUBSPATH}
jar cvf ${STUBS_JAR} -C ${STUBSPATH} .
rm -rf ${STUBSPATH}
# Install stubs in local repository to use it as a pom dependency
mvn install:install-file -Dfile=${STUBS_JAR} -DgroupId=es.bsc.dataclay \
-DartifactId=nfrtool-dataclay-stubs -Dversion=latest -Dpackaging=jar -DcreateChecksum=true
fi
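# Hypothetical pom.xml dependency matching the artifact installed above:
#   <dependency>
#     <groupId>es.bsc.dataclay</groupId>
#     <artifactId>nfrtool-dataclay-stubs</artifactId>
#     <version>latest</version>
#   </dependency>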
# This will exec the CMD from your Dockerfile
exec mvn "$@"
#!/usr/bin/env sh
# piping to sed prefixes the logs with the corresponding application name
run_zookeeper() {
    bin/zookeeper-server-start.sh config/zookeeper.properties | sed "s/^/[zookeeper] /"
}
wait_zookeeper() {
    while ! nc -z localhost 2181; do
        echo "Waiting for zookeeper"
        sleep 0.1
    done
}
run_kafka() {
    echo -e "\nadvertised.host.name=${ADVERTISED_HOST:-localhost}" >> config/server.properties
    echo -e "\nadvertised.port=${ADVERTISED_PORT:-9092}" >> config/server.properties
    bin/kafka-server-start.sh config/server.properties | sed "s/^/[kafka-broker] /"
}
# run both in parallel, but wait for zookeeper to start up before launching kafka
{ run_zookeeper & (wait_zookeeper && run_kafka) ; }
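# Usage sketch (assumption): override the advertised listener at run time, e.g.
#   ADVERTISED_HOST=kafka ADVERTISED_PORT=9092 sh <this-script>
# defaults are localhost:9092, as set in run_kafka above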
#!/bin/sh
# check if stubs exist
if [ -d "$STUBSPATH" ] && [ "$(ls -A $STUBSPATH)" ]; then
echo "Stubs found"
else
mkdir -p ${STUBSPATH}/events
mkdir -p ${STUBSPATH}/fiware
EVENTS_CONTRACTID=$(cat /dataclay-client/events-contractid)
FIWARE_CONTRACTID=$(cat /dataclay-client/fiware-contractid)
echo "Getting Events model stubs using contract id : $EVENTS_CONTRACTID"
python -m dataclay.tool get_stubs ${USER} ${PASS} ${EVENTS_CONTRACTID} ${STUBSPATH}/events
echo "Getting Fiware model stubs using contract id : $FIWARE_CONTRACTID"
python -m dataclay.tool get_stubs ${USER} ${PASS} ${FIWARE_CONTRACTID} ${STUBSPATH}/fiware
fi
# This will exec the CMD from your Dockerfile
exec python -u "$@"
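# Usage sketch: arguments after the image name become the python command, e.g.
#   docker run elasticeuh2020/dataclay-pyclient:<version> app.py
# runs "python -u app.py" after fetching the stubs (app.py is hypothetical;
# the default CMD is "--version")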
from dataclay import DataClayObject, dclayMethod
from dataclay.contrib.kafka import KafkaMixin


class Location(DataClayObject, KafkaMixin):
    """
    @ClassField obj_type str
    @ClassField coordinates list<float>
    """

    @dclayMethod(type="str", coordinates="list<float>")
    def __init__(self, type, coordinates):
        self.obj_type = type
        self.coordinates = coordinates

    @dclayMethod(return_="str")
    def get_json(self):
        import json
        jsonDict = {"type": self.obj_type, "coordinates": self.coordinates}
        jsonStr = json.dumps(jsonDict)
        return jsonStr


class Vehicle(DataClayObject, KafkaMixin):
    """
    @ClassField obj_id str
    @ClassField obj_type str
    @ClassField vehicleType str
    @ClassField category list<str>
    @ClassField location FiwareNS.classes.Location
    @ClassField name str
    @ClassField speed int
    @ClassField cargoWeight int
    @ClassField serviceStatus str
    @ClassField serviceProvided list<str>
    @ClassField areaServed str
    @ClassField refVehicleModel str
    @ClassField vehiclePlateIdentifier str
    """

    @dclayMethod(msg_json="dict<str, anything>")
    def __init__(self, msg_json):
        self.obj_id = msg_json["id"]
        self.obj_type = msg_json["type"]
        self.vehicleType = msg_json["vehicleType"]
        self.category = msg_json["category"]
        self.location = Location(msg_json["location"]["type"], msg_json["location"]["coordinates"])
        self.name = msg_json["name"]
        self.speed = msg_json["speed"]
        self.cargoWeight = msg_json["cargoWeight"]
        self.serviceStatus = msg_json["serviceStatus"]
        self.serviceProvided = msg_json["serviceProvided"]
        self.areaServed = msg_json["areaServed"]
        self.refVehicleModel = msg_json["refVehicleModel"]
        self.vehiclePlateIdentifier = msg_json["vehiclePlateIdentifier"]

    @dclayMethod(return_="str")
    def get_json(self):
        import json
        jsonDict = {"id": self.obj_id, "type": self.obj_type, "vehicleType": self.vehicleType, "category": self.category,
                    "location": {"type": self.location.obj_type, "coordinates": self.location.coordinates},
                    "name": self.name, "speed": self.speed, "cargoWeight": self.cargoWeight,
                    "serviceStatus": self.serviceStatus, "serviceProvided": self.serviceProvided,
                    "areaServed": self.areaServed, "refVehicleModel": self.refVehicleModel,
                    "vehiclePlateIdentifier": self.vehiclePlateIdentifier}
        jsonStr = json.dumps(jsonDict)
        return jsonStr
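# Hypothetical client-side usage sketch (not part of the registered model),
# assuming the stubs were fetched into STUBSPATH and a session is configured:
#   from dataclay.api import init
#   init()
#   from FiwareNS.classes import Vehicle
#   v = Vehicle(msg_json)   # msg_json: dict parsed from an incoming MQTT message
#   v.make_persistent()
#   print(v.get_json())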
#!/bin/sh
if [ -f "/dataclay-initializer/state.txt" ]; then
if [ "$(cat /dataclay-initializer/state.txt)" = "READY" ]; then
echo "HEALTHY"
exit 0
else
echo "UNHEALTHY"
exit 1
fi
else
exit 1
fi
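# (assumption: initialize-dataclay.sh writes READY into
# /dataclay-initializer/state.txt once registration finishes; this health
# check only reads that state)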
#!/bin/sh
set -x
set -e
@@ -7,39 +6,92 @@
# - $DATACLAY_JAR
# - $USER
# - $PASS
# - $JAVANAMESPACE
# - $PYNAMESPACE
# - $FIWARENAMESPACE
# - $DATASET
# - $DC_SHARED_VOLUME
# - $LOGICMODULE_PORT_TCP
# - $LOGICMODULE_HOST
# - $JAVA_NAMESPACES
# - $PYTHON_NAMESPACES
# - $JAVA_MODELS_PATH
# - $PYTHON_MODELS_PATH
# - $IMPORT_MODELS_FROM_EXTERNAL_DC_HOSTS
# - $IMPORT_MODELS_FROM_EXTERNAL_DC_PORTS
# - $IMPORT_MODELS_FROM_EXTERNAL_DC_NAMESPACES
########################### create cfgfiles ###########################
printf "HOST=${LOGICMODULE_HOST}\nTCPPORT=${LOGICMODULE_PORT_TCP}" > ${DATACLAYCLIENTCONFIG}
######################################################
# Wait for dataClay to be alive (max 10 retries, 5 seconds per retry)
dataclaycmd WaitForDataClayToBeAlive 10 5
# Register account
dataclaycmd NewAccount ${USER} ${PASS}
# Register datacontract
dataclaycmd NewDataContract ${USER} ${PASS} ${DATASET} ${USER}
########################### java model ###########################
# Register java model