Commit

change names
lacava committed Jul 17, 2024
1 parent b68201f commit 899c9b2
Showing 6 changed files with 67 additions and 249 deletions.
File renamed without changes.
77 changes: 43 additions & 34 deletions baseDockerfile
@@ -1,63 +1,72 @@
#################################################################################
## Notes: this image is large and many improvements are possible.
## Sources:
## - https://uwekorn.com/2021/03/01/deploying-conda-environments-in-docker-how-to-do-it-right.html
## - https://pythonspeed.com/articles/conda-docker-image-size/
## micromamba is failing for PySR, so sticking with mambaforge for now.
## FROM --platform=linux/amd64 mambaorg/micromamba:0.21.2 as build
##FROM condaforge/mambaforge:4.11.0-2 as base
## FROM condaforge/miniforge-pypy3:24.3.0-0 AS base
#FROM condaforge/miniforge-pypy3:23.11.0-0 AS base
#################################################################################
## Nvidia code ##################################################################
#################################################################################
#ENV PATH /usr/local/nvidia/bin/:$PATH
#ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64:$LD_LIBRARY_PATH
## Tell nvidia-docker the driver spec that we need as well as to
## use all available devices, which are mounted at /usr/local/nvidia.
## The LABEL supports an older version of nvidia-docker, the env
## variables a newer one.
#ENV NVIDIA_VISIBLE_DEVICES all
#ENV NVIDIA_DRIVER_CAPABILITIES compute,utility
#LABEL com.nvidia.volumes.needed="nvidia_driver"
################################################################################
# Notes: this image is large and many improvements are possible.
# Sources:
# - https://uwekorn.com/2021/03/01/deploying-conda-environments-in-docker-how-to-do-it-right.html
# - https://pythonspeed.com/articles/conda-docker-image-size/
# micromamba is failing for PySR, so sticking with mambaforge for now.
# FROM --platform=linux/amd64 mambaorg/micromamba:0.21.2 as build
#FROM condaforge/mambaforge:4.11.0-2 as base
# FROM condaforge/miniforge-pypy3:24.3.0-0 AS base
FROM condaforge/miniforge-pypy3:23.11.0-0 AS base
################################################################################
# Nvidia code ##################################################################
################################################################################
ENV PATH /usr/local/nvidia/bin/:$PATH
ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64:$LD_LIBRARY_PATH
# Tell nvidia-docker the driver spec that we need as well as to
# use all available devices, which are mounted at /usr/local/nvidia.
# The LABEL supports an older version of nvidia-docker, the env
# variables a newer one.
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility
LABEL com.nvidia.volumes.needed="nvidia_driver"
################################################################################
FROM mambaorg/micromamba:1.5.8

# Install base packages.
USER root

ARG DEBIAN_FRONTEND=noninteractive

# proxy for apt
# # proxy for apt
# ENV MIRROR="mirrors\.ocf\.berkeley\.edu"

# RUN sed -i -e "s/archive\.ubuntu\.com/${MIRROR}/" /etc/apt/sources.list
# RUN sed -i -e "s/security\.ubuntu\.com/${MIRROR}/" /etc/apt/sources.list
RUN sed -i -e "s/http/https/" /etc/apt/sources.list
# RUN sed -i -e "s/http/https/" /etc/apt/sources.list
#
# ENV MIRROR="debian\.csail\.mit\.edu"
# ENV MIRROR="us\.debian\.org"
# ENV MIRROR="debian\.cc\.lehigh\.edu"
# ENV MIRROR="debian\.cs\.binghamton\.edu"
# ENV MIRROR="debian\.mirror\.constant\.com"

# RUN sed -i -e "s/deb\.debian\.org/${MIRROR}/" /etc/apt/sources.list.d/debian.sources
RUN sed -i -e "s/http/https/" /etc/apt/sources.list.d/debian.sources
# COPY debian.sources /etc/apt/list/sources.list.d/
RUN apt update \
&& apt install -y \
default-jdk \
&& apt install -y \
# default-jdk \
rsync \
# bzip2 \
# ca-certificates \
curl \
# git \
git \
# wget \
build-essential \
libgmp3-dev \
libblas-dev \
liblapack-dev \
libgsl-dev \
vim \
jq \
# jq \
&& rm -rf /var/lib/apt/lists/*

USER $MAMBA_USER

# Install env
################################################################################
#USER $MAMBA_USER
SHELL ["/bin/bash", "-c"]
##VOLUME ["/srbench"]
#RUN mkdir /srbench
#WORKDIR "/srbench"
#COPY base_environment-lock.yml .
#RUN mamba env create -f base_environment-lock.yml
# SHELL ["/bin/bash", "-c"]
#VOLUME ["/srbench"]
# WORKDIR "/srbench"
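For orientation, a minimal build sketch for the new micromamba-based base image; the image tag srbench-base and building from the repository root are assumptions, not part of this commit:

    # build the shared base image (tag name is illustrative)
    docker build -f baseDockerfile -t srbench-base .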
107 changes: 22 additions & 85 deletions install_algorithm.sh
file mode changed: 100755 → 100644
@@ -1,119 +1,56 @@
# install one algorithm, located in directory passed as $1
# note: make sure conda srbench environment is installed
# install one algorithm with micromamba, located in directory passed as $1
set -e

# script to read yaml
function parse_yaml {
local prefix=$2
local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
sed -ne "s|^\($s\):|\1|" \
-e "s|^\($s\)\($w\)$s:$s[\"']\(.*\)[\"']$s\$|\1$fs\2$fs\3|p" \
-e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 |
awk -F$fs '{
indent = length($1)/2;
vname[indent] = $2;
for (i in vname) {if (i > indent) {delete vname[i]}}
if (length($3) > 0) {
vn=""; for (i=0; i<indent; i++) {vn=(vn)(vname[i])("_")}
printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
}
}'
}
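# For illustration only (hypothetical metadata.yml contents): given a file containing
#     name: "example-alg"
#     build_clone_base_env: "yes"
# parse_yaml flattens it into the shell assignments
#     name="example-alg"
#     build_clone_base_env="yes"
# which the eval further down turns into shell variables such as build_clone_base_env.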


SUBNAME=$(basename $1)
SUBFOLDER="$(dirname $1)/${SUBNAME}"
SUBENV="srbench-$SUBNAME"
# SUBENV="base"
SUBENV="base"

echo "SUBNAME: ${SUBNAME} ; SUBFOLDER: ${SUBFOLDER}"

install_base() {
# install base srbench environment if it doesn't exist
if conda info --envs | grep srbench | grep -v "srbench-"; then
echo "existing base srbench environment (not installing)";
else
echo "installing base srbench environment"

mamba env create -f base_environment.yml

eval "$(conda shell.bash hook)"

conda init bash
fi
}


echo "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv"
echo ".................... Installing $SUBNAME ..."


build_clone_base_env="yes"
########################################
# read yaml
eval $(parse_yaml $SUBFOLDER/metadata.yml)
########################################

echo "build_clone_base_env: ${build_clone_base_env}"
# if [ ${build_clone_base_env} == "yes" ] && [ ! -f "${SUBFOLDER}/environment-lock.yml" ] ; then

# echo "........................................"
# echo "Cloning base environment"
# echo "........................................"

# if conda info --envs | grep -q $SUBENV ; then
# echo "not cloning base because ${SUBENV} already exists"
# else
# install_base
# conda create --name $SUBENV --clone srbench
# # update from base environment
# if test -f "${SUBFOLDER}/environment.yml" ; then
# echo "Update alg env from environment.yml"
# echo "........................................"
# mamba env update -n $SUBENV -f ${SUBFOLDER}/environment.yml
# fi
# fi
# else

echo "........................................"
echo "Creating environment ${SUBENV} from scratch"
echo "Creating environment"
echo "........................................"
if test -f "${SUBFOLDER}/environment-lock.yml" ; then
echo "using ${SUBFOLDER}/environment-lock.yml"
mamba env update -n $SUBENV --file ${SUBFOLDER}/environment-lock.yml
add_base_env=true
if test -f "${SUBFOLDER}/environment.lock" ; then
echo "using ${SUBFOLDER}/environment.lock"
micromamba install -n base -y -f ${SUBFOLDER}/environment.lock
add_base_env=false
elif test -f "${SUBFOLDER}/environment.yml" ; then
echo "using ${SUBFOLDER}/environment.yml ... "
mamba env update -n $SUBENV -f ${SUBFOLDER}/environment.yml
# else
# echo "creating blank environment..."
# mamba create --name $SUBENV
micromamba install -n base -y -f ${SUBFOLDER}/environment.yml
fi
# fi

if test -f "${SUBFOLDER}/requirements.txt" ; then
echo "Update alg env from requirements.txt"
echo "........................................"
mamba run -n ${SUBENV} pip install -r ${SUBFOLDER}/requirements.txt
micromamba install -n base -y -c conda-forge pip
pip install -r ${SUBFOLDER}/requirements.txt
pip cache purge
fi

cd $SUBFOLDER
if test -f "install.sh" ; then
if $add_base_env ; then
micromamba install -n base -y -f base_environment.yml
fi

if test -f "${SUBFOLDER}/install.sh" ; then
echo "running install.sh..."
echo "........................................"
mamba run -n $SUBENV bash install.sh
cd $SUBFOLDER
micromamba run -n base bash install.sh
cd -
else
echo "::warning::No install.sh file found in ${SUBFOLDER}."
echo " Assuming the method is a conda package specified in environment.yml."
fi
cd -

# update with base package dependencies
mamba env update -n $SUBENV -f base_environment.yml

# export env
echo "Exporting environment"
echo "........................................"
# conda env export -n $SUBENV > $SUBFOLDER/environment.lock.yml
conda env export -n $SUBENV > $SUBFOLDER/environment.lock.yml
conda env export -n $SUBENV
# conda env export > $SUBFOLDER/environment.lock.yml
micromamba env export --explicit --md5 > $SUBFOLDER/environment.lock
echo "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
72 changes: 0 additions & 72 deletions microBaseDockerfile

This file was deleted.

56 changes: 0 additions & 56 deletions microinstall_algorithm.sh

This file was deleted.

4 changes: 2 additions & 2 deletions scripts/make_docker_compose_file.sh
@@ -19,11 +19,11 @@ algorithms=$(ls algorithms/)

for alg in ${algorithms[@]} ; do
# allow user to specify their own Dockerfile.
# otherwise use the default one (argDockerfile)
# otherwise use the default one (alg-Dockerfile)
if test -f "./algorithms/${alg}/Dockerfile" ; then
dockerfile="./algorithms/${alg}/Dockerfile"
else
dockerfile="argDockerfile"
dockerfile="alg-Dockerfile"
fi

cat <<EOF >> docker-compose.yml
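A possible end-to-end use of this helper, assuming it is run from the repository root and that the generated docker-compose.yml defines one buildable service per algorithm (the service definitions are truncated in this diff):

    # regenerate docker-compose.yml, then build the algorithm images it defines
    bash scripts/make_docker_compose_file.sh
    docker compose build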
