Commit 04b29cb2 authored by dgasull's avatar dgasull

Created deploy script to publish elastic docker images with models

parent e2800edc
/.pydevproject
/.project
/LM.sqlite
/tmp/
[submodule "events-model"]
path = events-model
url = https://gitlab.bsc.es/elastic-h2020/elastic-sa/events-dataclay-model.git
[submodule "nfrtool-model"]
path = nfrtool-model
url = https://gitlab.bsc.es/elastic-h2020/elastic-sa/dataclay.git
FROM bscdataclay/client:2.4
# Use the key=value LABEL form; the legacy space-separated form
# (`LABEL maintainer dataClay team <...>`) is deprecated syntax.
LABEL maintainer="dataClay team <support-dataclay@bsc.es>"
FROM ubuntu:18.04
# Prepare environment
# ELASTIC_HOME is the root directory for all copied sources and config files.
ENV ELASTIC_HOME=/elastic
WORKDIR ${ELASTIC_HOME}
# dataClay client configuration files (paths consumed by dataClay tooling).
ENV DATACLAYCLIENTCONFIG=${ELASTIC_HOME}/cfgfiles/client.properties
ENV DATACLAYGLOBALCONFIG=${ELASTIC_HOME}/cfgfiles/global.properties
ENV DATACLAYSESSIONCONFIG=${ELASTIC_HOME}/cfgfiles/session.properties
# Default dataClay credentials/namespace used by the registration steps below.
ENV NAMESPACE=ElasticNS
ENV USER=ElasticUser
ENV PASS=ElasticPass
ENV DATASET=ElasticDS
ENV MODELBINPATH=${ELASTIC_HOME}/src
# If we want to run demo again, argument must be modified
# NOTE(review): DATASET/USER/PASS are declared both as ENV above and ARG here;
# the ARG value shadows the ENV during build — confirm this duplication is intended.
ARG PYNAMESPACE=ElasticNS
ARG JAVANAMESPACE=ElasticJavaNS
ARG DATASET=ElasticDS
ARG USER=ElasticUser
ARG PASS=ElasticPass
ARG DATACLAY_VIRTUAL_ENV=/dataclay_venv
ARG DATACLAY_PYVER=3.7
ARG DATACLAY_PIP_VERSION=3
# CACHEBUST is changed by deploy.sh (date +%s) to force re-registration layers.
ARG CACHEBUST=1
# Copy files
COPY ./src ${ELASTIC_HOME}/src
COPY ./cfgfiles ${ELASTIC_HOME}/cfgfiles
# NOTE(review): DATACLAY_LOG_CONFIG is not defined anywhere in this file —
# presumably inherited from a dataClay base image; on plain ubuntu:18.04 this
# would copy to an empty destination. Confirm which FROM this step belongs to.
RUN cp ${ELASTIC_HOME}/cfgfiles/log4j2.xml ${DATACLAY_LOG_CONFIG}
# NOTE(review): the ENV block below repeats the definitions above with
# hard-coded paths — this view appears to concatenate several Dockerfiles.
ENV ELASTIC_HOME=/elastic
ENV DATACLAYCLIENTCONFIG=/elastic/cfgfiles/client.properties
ENV DATACLAYGLOBALCONFIG=/elastic/cfgfiles/global.properties
ENV DATACLAYSESSIONCONFIG=/elastic/cfgfiles/session.properties
# Destination directories for generated stubs (java and python).
ENV JAVASTUBSPATH=/elastic/stubs/java_stubs
ENV PYSTUBSPATH=/elastic/stubs/python_stubs
# Wait for dataclay to be alive (max retries 10 and 5 seconds per retry)
# Requires a running dataClay reachable at build time (deploy.sh builds with
# --network dataclay_default for this reason).
RUN dataclaycmd WaitForDataClayToBeAlive 10 5
# Install packages:
# Java 8 + maven for the java model build, python + pip for the python model.
# apt lists are removed in the same layer to keep the image small.
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated openjdk-8-jdk \
maven python${DATACLAY_PYVER} gcc libyaml-dev libpython${DATACLAY_PYVER}-dev build-essential curl libpython2.7-dev \
python${DATACLAY_PYVER}-dev python${DATACLAY_PIP_VERSION}-pip \
python${DATACLAY_PIP_VERSION}-setuptools \
&& rm -rf /var/lib/apt/lists/*
# Register account
RUN dataclaycmd NewAccount ${USER} ${PASS}
# Path where maven places the dataClay fat jar fetched below.
ENV DATACLAY_JAR="/root/.m2/repository/es/bsc/dataclay/dataclay/2.4/dataclay-2.4-jar-with-dependencies.jar"
# Get dataClay maven dependency and set env
RUN mvn -DgroupId=es.bsc.dataclay -DartifactId=dataclay -Dversion=2.4 -Dclassifier=jar-with-dependencies dependency:get
# If we want to build again, argument must be modified
ARG CACHEBUST
# Copy files for connecting dataClay
COPY ./cfgfiles ${ELASTIC_HOME}/cfgfiles
# Wait for dataclay to be alive (max retries 10 and 5 seconds per retry)
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.WaitForDataClayToBeAlive 10 5
# Register account
# NOTE(review): the account and datacontract are each registered twice — once
# via dataclaycmd and once via the Java tool. Presumably idempotent or these
# steps belong to different Dockerfiles in this commit view; confirm.
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewAccount ${USER} ${PASS}
# Register datacontract
RUN dataclaycmd NewDataContract ${USER} ${PASS} ${DATASET} ${USER}
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewDataContract ${USER} ${PASS} ${DATASET} ${USER}
########################### java model ###########################
# Register namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewNamespace ${USER} ${PASS} ${JAVANAMESPACE} java
# Register java model
# The nfrtool model is a git submodule (see .gitmodules); it is built with
# maven and its compiled classes are registered into the java namespace.
COPY ./nfrtool-model/model ${ELASTIC_HOME}/nfrtool-model/model
RUN cd ${ELASTIC_HOME}/nfrtool-model/model && mvn package
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewModel ${USER} ${PASS} ${JAVANAMESPACE} ${ELASTIC_HOME}/nfrtool-model/model/target/classes
# Access namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.AccessNamespace ${USER} ${PASS} ${JAVANAMESPACE}
# Get java stubs
# Stubs are written to JAVASTUBSPATH and later extracted by deploy.sh.
RUN mkdir -p ${JAVASTUBSPATH}
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.GetStubs ${USER} ${PASS} ${JAVANAMESPACE} ${JAVASTUBSPATH}
########################### python model ###########################
# Register namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewNamespace ${USER} ${PASS} ${PYNAMESPACE} python
# Create virtualenvironment
RUN pip${DATACLAY_PIP_VERSION} install --upgrade pip
RUN pip${DATACLAY_PIP_VERSION} install wheel
RUN pip${DATACLAY_PIP_VERSION} install virtualenv
# Virtualenv pinned to the python version required by dataClay (3.7).
RUN python${DATACLAY_PIP_VERSION} -m virtualenv --python=/usr/bin/python${DATACLAY_PYVER} ${DATACLAY_VIRTUAL_ENV}
# Put the virtualenv first in PATH so subsequent pip/python use it.
ENV PATH="$DATACLAY_VIRTUAL_ENV/bin:$PATH"
RUN python${DATACLAY_PYVER} --version
# Install dataClay
# NOTE(review): the dataClay pip package is unpinned — builds are not
# reproducible; consider pinning to the version matching the 2.4 images.
RUN pip${DATACLAY_PIP_VERSION} install dataClay
# Copy models
COPY ./events-model/model ${ELASTIC_HOME}/events-model/model
# Register model
# NOTE(review): two different registration mechanisms are used — dataclaycmd
# with ${NAMESPACE}/${MODELBINPATH} and dataclay.tool with ${PYNAMESPACE};
# confirm both are needed or whether one is a leftover.
RUN dataclaycmd NewModel ${USER} ${PASS} ${NAMESPACE} ${MODELBINPATH} python
# Register python models
RUN python -m dataclay.tool register_model ${USER} ${PASS} ${PYNAMESPACE} ${ELASTIC_HOME}/events-model/model/src
# Run
# This image only bakes registered models/stubs that deploy.sh extracts with
# `docker create` + `docker cp`; it is not meant to be executed.
# Exec-form ENTRYPOINT must name a real executable: the previous value
# ["Nothing to do here"] made any accidental `docker run` fail with
# "executable file not found". Print the message instead.
ENTRYPOINT ["echo", "Nothing to do here"]
# Get stubs
RUN mkdir -p ${PYSTUBSPATH}
# AccessNamespace prints the contract id on its last output line (tail -1);
# the id is then used to download the python stubs into PYSTUBSPATH.
RUN CONTRACTID=`java -cp $DATACLAY_JAR es.bsc.dataclay.tool.AccessNamespace ${USER} ${PASS} ${PYNAMESPACE} | tail -1` \
&& python${DATACLAY_PYVER} -m dataclay.tool get_stubs ${USER} ${PASS} ${CONTRACTID} ${PYSTUBSPATH}
Extension of dataClay for ELASTIC applications
\ No newline at end of file
This repository contains needed scripts to build dataClay docker images with already registered models and stubs.
## Deploy
To deploy the ELASTIC dataClay images, run `deploy.sh` (typically needed whenever any of the models registered in dataClay change).
## Structure
```
.
├── deploy_pyrequirements.sh : script to deploy image with dataClay deps (due to slow build in ARM)
├── deploy.sh: Deployment script
├── Dockerfile: docker file used to register accounts, model,...
├── elastic.dsjava.Dockerfile: elasticeuh2020/dataclay-dsjava image
├── elastic.dspython.Dockerfile: elasticeuh2020/dataclay-dspython image
├── elastic.javaclient.Dockerfile: elasticeuh2020/dataclay-javaclient image (with stubs)
├── elastic.logicmodule.Dockerfile: elasticeuh2020/dataclay-logicmodule image
├── elastic.pyclient.Dockerfile: elasticeuh2020/dataclay-pyclient image (with stubs)
├── elastic.pyrequirements.Dockerfile: elasticeuh2020/dataclay-pyclient with dataClay reqs (see note below)
├── events-model: events model
├── nfrtool-model: NFRTool dataClay model
├── cfgfiles: needed files to register model...
│   ├── client.properties
│   ├── global.properties
│   ├── log4j2.xml
│   └── session.properties
├── dataclay: dataClay being extended
│   ├── docker-compose.yml
│   └── prop
│   ├── global.properties
│   └── log4j2.xml
├── README.md
├── tmp: temporary build files
```
## Note: slow build of dataClay dependencies in ARM 64
The docker image `elasticeuh2020/dataclay-pyclient:${VERSION}-requirements` was created to avoid rebuilding the dependencies many times, since PyPI repositories do not offer `arm64` wheels for the package dependency `grpcio` (required by dataClay).
The image `elasticeuh2020/dataclay-pyclient:${VERSION}` extends the requirements image built before.
#!/bin/bash
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
#
## Build and start dataClay
#pushd $SCRIPTDIR/dataclay
#docker-compose kill
#docker-compose down -v #sanity check
#docker-compose up -d
#popd
#
## BUILD ####
#pushd $SCRIPTDIR
#docker build --network=dataclay_default \
# --build-arg CACHEBUST=$(date +%s) \
# -t bscdataclay/client:2.1-elastic .
#popd
#
#echo " ===== Retrieving execution classes into $SCRIPTDIR/deploy ====="
## Copy execClasses from docker
#rm -rf $SCRIPTDIR/deploy
#rm -rf $SCRIPTDIR/execClasses
#mkdir -p $SCRIPTDIR/deploy
#mkdir -p $SCRIPTDIR/execClasses
#docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR
#docker cp dataclay_dsjava_1:/home/dataclayusr/dataclay/execClasses/ $SCRIPTDIR
#
#echo " ===== Retrieving SQLITE LM into $SCRIPTDIR/LM.sqlite ====="
#rm -f $SCRIPTDIR/LM.sqlite
#TABLES="account credential contract interface ifaceincontract opimplementations datacontract dataset accessedimpl accessedprop type java_type python_type memoryfeature cpufeature langfeature archfeature prefetchinginfo implementation python_implementation java_implementation annotation property java_property python_property operation java_operation python_operation metaclass java_metaclass python_metaclass namespace"
#for table in $TABLES;
#do
# docker exec -t dataclay_logicmodule_1 sqlite3 "//dataclay/storage/LM" ".dump $table" >> $SCRIPTDIR/LM.sqlite
#done
#
#echo " ===== Stopping dataClay ====="
#pushd $SCRIPTDIR/dataclay
#docker-compose -f docker-compose.yml down
#popd
pushd $SCRIPTDIR
# Build the three ELASTIC dataClay service images from their Dockerfiles.
for component in logicmodule dsjava dspython; do
	echo " ===== Building docker bscdataclay/${component}-elastic ====="
	docker build -f elastic.${component}.Dockerfile -t bscdataclay/${component}-elastic:2.4 .
done
popd
HOST=logicmodule
TCPPORT=11034
CHECK_LOG4J_DEBUG=false
<?xml version="1.0" encoding="UTF-8"?>
<Configuration monitorInterval="60" status="off">
<Appenders>
<Console name="ConsoleAppender" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %p [%c] [%t] [%C{1}:%L] %m%n"></PatternLayout>
</Console>
</Appenders>
<Loggers>
<!-- Runtime -->
<Logger name="ClientRuntime" level="off" />
<Logger name="ClientManagementLib" level="off"/>
<Logger name="DataClayRuntime" level="off"/>
<Logger name="DataServiceRuntime" level="off"/>
<Logger name="DataClayObjectLoader" level="off"/>
<Logger name="DataClayObject" level="off" /> <!-- This is very verbose! -->
<!-- Data service -->
<Logger name="DataService" level="info"/>
<Logger name="ExecutionEnvironmentSrv" level="off"/>
<!-- Lockers -->
<Logger name="dataclay.util.classloaders.SyncClass" level="off"/>
<Logger name="dataclay.heap.LockerPool" level="off"/>
<Logger name="LockerPool" level="off"/>
<Logger name="dataclay.util.classloaders.ClassLockers" level="off"/>
<!-- Garbage collection -->
<Logger name="GlobalGC" level="off"/>
<Logger name="heap.HeapManager" level="off"/>
<Logger name="ReferenceCounting" level="off"/>
<Logger name="StorageLocation" level="off"/>
<!-- Logic module -->
<Logger name="LogicModule" level="info"/>
<Logger name="LMDB" level="off"/>
<Logger name="managers" level="off" />
<Logger name="MetaDataService.db" level="off" />
<Logger name="MetaDataService" level="off" />
<!-- Communication -->
<Logger name="io.grpc" level="off"/>
<Logger name="io.netty" level="off"/>
<Logger name="NettyClientHandler" level="off"/>
<Logger name="grpc.client" level="off"/>
<Logger name="communication.LogicModule.service" level="off"/>
<Logger name="grpc.client.logicmodule" level="off"/>
<Logger name="grpc.client.dataservice.DS1" level="off"/>
<!-- Databases -->
<Logger name="dataclay.dbhandler" level="off"/>
<Logger name="dbhandler.PostgresConnection" level="off" />
<Logger name="org.apache.commons.dbcp2" level="off"/>
<Logger name="PostgresHandler" level="off"/>
<Logger name="SQLHandler" level="off"/>
<!-- Misc -->
<Logger name="util" level="off" />
<Logger name="exceptions" level="off"/>
<Logger name="Paraver" level="info"/>
<Logger name="DataClaySerializationLib" level="off"/>
<Logger name="DataClayDeserializationLib" level="off"/>
<!-- ROOT LOGGER -->
<Root level="off">
<AppenderRef ref="ConsoleAppender" />
</Root>
</Loggers>
</Configuration>
Account=ElasticUser
Password=ElasticPass
DataSets=ElasticDS
DataSetForStore=ElasticDS
StubsClasspath=/demo/stubs
version: '3.4'
services:
logicmodule:
image: "bscdataclay/logicmodule:2.4"
command: "${COMMAND_OPTS}"
ports:
- "11034:11034"
environment:
- LOGICMODULE_PORT_TCP=11034
- LOGICMODULE_HOST=logicmodule
- DATACLAY_ADMIN_USER=admin
- DATACLAY_ADMIN_PASSWORD=admin
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
- ./prop/log4j2.xml:/home/dataclayusr/dataclay/logging/log4j2.xml:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
retries: 10
test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
dsjava:
image: "bscdataclay/dsjava:2.4"
command: "${COMMAND_OPTS}"
ports:
- "2127:2127"
depends_on:
- logicmodule
environment:
- DATASERVICE_NAME=DS1
- DATASERVICE_JAVA_PORT_TCP=2127
- LOGICMODULE_PORT_TCP=11034
- LOGICMODULE_HOST=logicmodule
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
- ./prop/log4j2.xml:/home/dataclayusr/dataclay/logging/log4j2.xml:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
retries: 10
test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
dspython:
image: "bscdataclay/dspython:2.4"
command: "${COMMAND_OPTS}"
ports:
- "6867:6867"
depends_on:
- logicmodule
- dsjava
environment:
- DATASERVICE_NAME=DS1
- LOGICMODULE_PORT_TCP=11034
- LOGICMODULE_HOST=logicmodule
- DEBUG=False
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
retries: 10
test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
\ No newline at end of file
CHECK_LOG4J_DEBUG=false
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<Configuration monitorInterval="60" status="off">
<Appenders>
<Console name="ConsoleAppender" target="SYSTEM_OUT">
<PatternLayout
pattern="%d{ISO8601} %p [%c] [%t] [%C{1}:%L] %m%n"></PatternLayout>
</Console>
</Appenders>
<Loggers>
<!-- Runtime -->
<Logger name="ClientRuntime" level="off" />
<Logger name="ClientManagementLib" level="off" />
<Logger name="DataClayRuntime" level="off" />
<Logger name="DataServiceRuntime" level="off" />
<Logger name="DataClayObjectLoader" level="off" />
<Logger name="DataClayObject" level="off" /> <!-- This is very verbose! -->
<!-- Data service -->
<Logger name="DataService" level="off" />
<Logger name="ExecutionEnvironmentSrv" level="off" />
<!-- Lockers -->
<Logger name="dataclay.util.classloaders.SyncClass" level="off" />
<Logger name="dataclay.heap.LockerPool" level="off" />
<Logger name="LockerPool" level="off" />
<Logger name="dataclay.util.classloaders.ClassLockers"
level="off" />
<!-- Garbage collection -->
<Logger name="GlobalGC" level="off" />
<Logger name="heap.HeapManager" level="off" />
<Logger name="ReferenceCounting" level="off" />
<Logger name="StorageLocation" level="off" />
<!-- Logic module -->
<Logger name="LogicModule" level="off" />
<Logger name="LMDB" level="off" />
<Logger name="managers" level="off" />
<Logger name="MetaDataService.db" level="off" />
<Logger name="MetaDataService" level="off" />
<!-- Communication -->
<Logger name="io.grpc" level="off" />
<Logger name="io.netty" level="off" />
<Logger name="NettyClientHandler" level="off" />
<Logger name="grpc.client" level="off" />
<Logger name="grpc.client.logicmodule" level="off" />
<Logger name="communication.LogicModule.service" level="off" />
<!-- Databases -->
<Logger name="dataclay.dbhandler" level="off" />
<Logger name="dbhandler.PostgresConnection" level="off" />
<Logger name="org.apache.commons.dbcp2" level="off" />
<Logger name="PostgresHandler" level="off" />
<Logger name="SQLHandler" level="off" />
<!-- Misc -->
<Logger name="util" level="off" />
<Logger name="exceptions" level="off" />
<Logger name="Paraver" level="info" />
<Logger name="DataClaySerializationLib" level="off" />
<Logger name="DataClayDeserializationLib" level="off" />
<!-- ROOT LOGGER -->
<Root level="off">
<AppenderRef ref="ConsoleAppender" />
</Root>
</Loggers>
</Configuration>
#!/bin/bash
#===================================================================================
#
# FILE: deploy.sh
#
# USAGE: deploy.sh
#
# DESCRIPTION: Deploy ELASTIC dataClay into DockerHub
#
# OPTIONS: see function ’usage’ below
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: dgasull@bsc.es
# COMPANY: Barcelona Supercomputing Center (BSC)
# VERSION: 1.0
#===================================================================================
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
VERSION=1.0
REQUIRED_DOCKER_VERSION=19
export PLATFORMS=linux/amd64,linux/arm64
############################# Prepare docker builder #############################
# Ensure the docker server is recent enough for buildx multi-arch builds.
printf "Checking if docker version >= $REQUIRED_DOCKER_VERSION..."
version=$(docker version --format '{{.Server.Version}}')
# Compare the numeric major version. The previous lexicographic test
# ([[ "$version" < "$REQUIRED_DOCKER_VERSION" ]]) mis-orders version
# strings (e.g. "100.0" sorts before "19").
if [ "${version%%.*}" -lt "$REQUIRED_DOCKER_VERSION" ]; then
	echo "ERROR: Docker version is less than $REQUIRED_DOCKER_VERSION"
	exit 1
fi
printf "OK\n"
# prepare architectures
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
#docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
docker run --rm -t arm64v8/ubuntu uname -m
DOCKER_BUILDER=$(docker buildx create)
docker buildx use $DOCKER_BUILDER
echo "Checking buildx with available platforms to simulate..."
docker buildx inspect --bootstrap
BUILDER_PLATFORMS=$(docker buildx inspect --bootstrap | grep Platforms | awk -F":" '{print $2}')
IFS=',' read -ra BUILDER_PLATFORMS_ARRAY <<< "$BUILDER_PLATFORMS"
IFS=',' read -ra SUPPORTED_PLATFORMS_ARRAY <<< "$PLATFORMS"
echo "Builder created with platforms: ${BUILDER_PLATFORMS_ARRAY[@]}"
#Print the split string
# Verify that every platform we intend to publish for can be simulated by
# the buildx builder; abort the whole deployment otherwise.
for i in "${SUPPORTED_PLATFORMS_ARRAY[@]}"
do
	FOUND=false
	SUP_PLATFORM=`echo $i | sed 's/ *$//g'` #remove spaces
	printf "Checking if platform $i can be simulated by buildx..."
	for j in "${BUILDER_PLATFORMS_ARRAY[@]}"
	do
		B_PLATFORM=`echo $j | sed 's/ *$//g'` #remove spaces
		if [ "$SUP_PLATFORM" == "$B_PLATFORM" ]; then
			FOUND=true
			break
		fi
	done
	if [ "$FOUND" = false ] ; then
		echo "ERROR: missing support for $i in buildx builder."
		echo " Check https://github.com/multiarch/qemu-user-static for more information on how to simulate architectures"
		# BUGFIX: 'return' is only valid inside a function or a sourced script,
		# and -1 is not a valid exit status (0-255); use 'exit 1' to abort.
		exit 1
	fi
	printf "OK\n"
done
#######################################################################################
## Build and start dataClay
pushd $SCRIPTDIR/dataclay
docker-compose kill
docker-compose down -v #sanity check
docker-compose up -d
popd
#
## BUILD ####
pushd $SCRIPTDIR
docker build --network dataclay_default --build-arg CACHEBUST=$(date +%s) -t elasticeuh2020/dataclay-model-creator .
#
rm -rf $SCRIPTDIR/tmp/
echo " ===== Retrieving execution classes into $SCRIPTDIR/tmp/deploy and $SCRIPTDIR/tmp/execClasses ====="
## Copy execClasses from docker
mkdir -p $SCRIPTDIR/tmp/deploy
mkdir -p $SCRIPTDIR/tmp/execClasses
docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR/tmp/
docker cp dataclay_dsjava_1:/home/dataclayusr/dataclay/execClasses/ $SCRIPTDIR/tmp/
#
echo " ===== Retrieving SQLITE LM into $SCRIPTDIR/tmp/LM.sqlite ====="
rm -f $SCRIPTDIR/tmp/LM.sqlite
TABLES="account credential contract interface ifaceincontract opimplementations datacontract dataset accessedimpl accessedprop type java_type python_type memoryfeature cpufeature langfeature archfeature prefetchinginfo implementation python_implementation java_implementation annotation property java_property python_property operation java_operation python_operation metaclass java_metaclass python_metaclass namespace"
for table in $TABLES;
do
docker exec -t dataclay_logicmodule_1 sqlite3 "//dataclay/storage/LM" ".dump $table" >> $SCRIPTDIR/tmp/LM.sqlite
done
echo " ===== Retrieving stubs ====="
mkdir -p $SCRIPTDIR/tmp/stubs
id=$(docker create elasticeuh2020/dataclay-model-creator)
docker cp $id:/elastic/stubs/ $SCRIPTDIR/tmp/
docker rm -v $id
############################# Push into DockerHub #############################
#docker login -u=elasticeuh2020
echo " ===== Building docker elasticeuh2020/dataclay-pyclient:${VERSION} ====="
docker buildx build -f elastic.pyclient.Dockerfile --no-cache \
--build-arg REQ_VERSION=${VERSION} \
--cache-to=type=registry,ref=elasticeuh2020/dataclay-pyclient:buildxcache${VERSION},mode=max \
--cache-from=type=registry,ref=elasticeuh2020/dataclay-pyclient:buildxcache${VERSION} \
-t elasticeuh2020/dataclay-pyclient:$VERSION --platform $PLATFORMS --push .
echo " ===== Building docker elasticeuh2020/dataclay-javaclient:${VERSION} ====="
docker buildx build -f elastic.javaclient.Dockerfile --no-cache \
--cache-to=type=registry,ref=elasticeuh2020/dataclay-javaclient:buildxcache${VERSION},mode=max \
--cache-from=type=registry,ref=elasticeuh2020/dataclay-javaclient:buildxcache${VERSION} \
-t elasticeuh2020/dataclay-javaclient:$VERSION --platform $PLATFORMS --push .
echo " ===== Building docker elasticeuh2020/dataclay-logicmodule:${VERSION} ====="
docker buildx build -f elastic.logicmodule.Dockerfile --no-cache \
-t elasticeuh2020/dataclay-logicmodule:$VERSION --platform $PLATFORMS --push .
echo " ===== Building docker elasticeuh2020/dataclay-dsjava:${VERSION} ====="
docker buildx build -f elastic.dsjava.Dockerfile --no-cache \
-t elasticeuh2020/dataclay-dsjava:$VERSION --platform $PLATFORMS --push .
echo " ===== Building docker elasticeuh2020/dataclay-dspython:${VERSION} ====="
docker buildx build -f elastic.dspython.Dockerfile --no-cache \
-t elasticeuh2020/dataclay-dspython:$VERSION --platform $PLATFORMS --push .
popd
#docker logout
docker buildx rm $DOCKER_BUILDER
#######################################################################################
echo " ===== Stopping dataClay ====="
pushd $SCRIPTDIR/dataclay
docker-compose -f docker-compose.yml down -v
popd
#!/bin/bash
#===================================================================================
#
# FILE: deploy.sh
#
# USAGE: deploy.sh
#
# DESCRIPTION: Deploy python requirements into DockerHub
#
# OPTIONS: see function ’usage’ below
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: dgasull@bsc.es
# COMPANY: Barcelona Supercomputing Center (BSC)
# VERSION: 1.0
#===================================================================================
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
VERSION=1.0
REQUIRED_DOCKER_VERSION=19
export PLATFORMS=linux/amd64,linux/arm64
############################# Prepare docker builder #############################
# Ensure the docker server is recent enough for buildx multi-arch builds.
printf "Checking if docker version >= $REQUIRED_DOCKER_VERSION..."
version=$(docker version --format '{{.Server.Version}}')
# Compare the numeric major version. The previous lexicographic test
# ([[ "$version" < "$REQUIRED_DOCKER_VERSION" ]]) mis-orders version
# strings (e.g. "100.0" sorts before "19").
if [ "${version%%.*}" -lt "$REQUIRED_DOCKER_VERSION" ]; then
	echo "ERROR: Docker version is less than $REQUIRED_DOCKER_VERSION"
	exit 1
fi
printf "OK\n"
# prepare architectures
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
#docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
docker run --rm -t arm64v8/ubuntu uname -m
DOCKER_BUILDER=$(docker buildx create)
docker buildx use $DOCKER_BUILDER
echo "Checking buildx with available platforms to simulate..."
docker buildx inspect --bootstrap
BUILDER_PLATFORMS=$(docker buildx inspect --bootstrap | grep Platforms | awk -F":" '{print $2}')
IFS=',' read -ra BUILDER_PLATFORMS_ARRAY <<< "$BUILDER_PLATFORMS"
IFS=',' read -ra SUPPORTED_PLATFORMS_ARRAY <<< "$PLATFORMS"
echo "Builder created with platforms: ${BUILDER_PLATFORMS_ARRAY[@]}"
#Print the split string
# Verify that every platform we intend to publish for can be simulated by
# the buildx builder; abort the whole deployment otherwise.
for i in "${SUPPORTED_PLATFORMS_ARRAY[@]}"
do
	FOUND=false
	SUP_PLATFORM=`echo $i | sed 's/ *$//g'` #remove spaces
	printf "Checking if platform $i can be simulated by buildx..."
	for j in "${BUILDER_PLATFORMS_ARRAY[@]}"
	do
		B_PLATFORM=`echo $j | sed 's/ *$//g'` #remove spaces
		if [ "$SUP_PLATFORM" == "$B_PLATFORM" ]; then
			FOUND=true
			break
		fi
	done
	if [ "$FOUND" = false ] ; then
		echo "ERROR: missing support for $i in buildx builder."
		echo " Check https://github.com/multiarch/qemu-user-static for more information on how to simulate architectures"
		# BUGFIX: 'return' is only valid inside a function or a sourced script,
		# and -1 is not a valid exit status (0-255); use 'exit 1' to abort.
		exit 1
	fi
	printf "OK\n"
done
#######################################################################################
# NOTE we create a requirements image to avoid building it many times (it may take time for ARM64)
echo " ===== Building docker elasticeuh2020/dataclay-pyclient:${VERSION}-requirements ====="
echo " WARNING!!!!!!!!!!!!!!!!!!: It may take a lot of time for ARM64 arch "
docker buildx build -f elastic.pyrequirements.Dockerfile \
-t elasticeuh2020/dataclay-pyclient:${VERSION}-requirements --platform $PLATFORMS --push .
docker buildx rm $DOCKER_BUILDER
#!/bin/sh -e
# Resolve the preferred Apache mirror for the requested Kafka release and
# download the tarball into /tmp.
# Requires: curl, jq, wget; env vars SCALA_VERSION and KAFKA_VERSION set by
# the calling Dockerfile.
FILENAME="kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
# closer.cgi returns JSON; jq concatenates the preferred mirror and the path.
url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/${FILENAME}&as_json=1" | jq -r '"\(.preferred)\(.path_info)"')
echo "Downloading Kafka from $url"
wget -q "${url}" -O /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
\ No newline at end of file
......@@ -2,7 +2,7 @@ FROM bscdataclay/dsjava:2.4
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq >/dev/null \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq mosquitto mosquitto-clients >/dev/null \
&& rm -rf /var/lib/apt/lists/*
# KAFKA
......@@ -11,13 +11,13 @@ ENV SCALA_VERSION=2.12
ENV KAFKA_HOME=/opt/kafka
ENV PATH=${PATH}:${KAFKA_HOME}/bin
RUN mkdir -p ${KAFKA_HOME}
COPY ./download_kafka.sh ./download_kafka.sh
# Download kafka
RUN ./download_kafka.sh
RUN tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
# Download kafka
RUN url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz&as_json=1" | jq -r '"\(.preferred)\(.path_info)"') \
&& wget -q "${url}" -O /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \
&& tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
#COPY ./execClasses ${DATACLAY_HOME}/execClasses
COPY ./tmp/execClasses ${DATACLAY_HOME}/execClasses
# Execute
# Don't use CMD in order to keep compatibility with singularity container's generator
......
......@@ -2,7 +2,7 @@ FROM bscdataclay/dspython:2.4
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq >/dev/null \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq mosquitto mosquitto-clients >/dev/null \
&& rm -rf /var/lib/apt/lists/*
# KAFKA
......@@ -11,13 +11,14 @@ ENV SCALA_VERSION=2.12
ENV KAFKA_HOME=/opt/kafka
ENV PATH=${PATH}:${KAFKA_HOME}/bin
RUN mkdir -p ${KAFKA_HOME}
COPY ./download_kafka.sh ./download_kafka.sh
# Download kafka
RUN ./download_kafka.sh
RUN tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
RUN url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz&as_json=1" | jq -r '"\(.preferred)\(.path_info)"') \
&& wget -q "${url}" -O /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \
&& tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
#COPY ./deploy ${DATACLAY_HOME}/deploy
COPY ./tmp/deploy ${DATACLAY_HOME}/deploy