Nb_conda_kernels not seen in the jupyterhub notebook

Building the custom nb_conda_kernels for the notebook.

Dockerfile:

This includes cublas and cudnn

# Notebook image: CUDA runtime (includes cuBLAS and cuDNN) on Rocky Linux 8.
FROM nvidia/cuda:12.1.1-cudnn8-runtime-rockylinux8

USER root

RUN echo "new run"

# python/system deps
RUN yum -y update --disablerepo=cuda && yum -y upgrade --disablerepo=cuda && yum clean all
RUN yum install -y nss-pam-ldapd mesa-libGL tree
RUN yum install -y openssh-server openssh-clients
RUN yum install -y git curl epel-release
RUN yum install -y texlive-* texlive-ucharcat texlive-tcolorbox
# Use LDAP instead of sssd for name-service lookups.
RUN sed -i 's/sss/ldap/g' /etc/nsswitch.conf

# Miniforge and build tools (single layer; note the required line continuations)
RUN yum -y install bzip2 \
 && curl -sSL "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" -o /tmp/miniforge.sh \
 && bash /tmp/miniforge.sh -bfp /usr/local/ \
 && rm -rf /tmp/miniforge.sh \
 && conda install -y python=3.12 conda-build mamba \
 && conda update -y conda \
 && conda clean --all --yes

# env vars
ENV CUDA_PATH=/usr/local/cuda
ENV LD_LIBRARY_PATH=$CUDA_PATH/lib64:$LD_LIBRARY_PATH

# Build and install custom kernel package
COPY nb_conda_kernels_dls/ nb_conda_kernels_dls
RUN conda build nb_conda_kernels_dls

# Install from local build with conda (into the base env, for the server)
RUN conda install --use-local nb_conda_kernels_dls

RUN rm -rf /usr/local/etc/jupyter/jupyter_notebook_config.d/jupyterlab.json

# Create the notebook environment
COPY environment.yml environment.yml
RUN mamba env create -f environment.yml

# Install the locally built package into the notebook env as well,
# pulling from the local conda-bld channel produced by `conda build`.
RUN mamba install -n jupyterhub-notebook -c file:///usr/local/conda-bld -c conda-forge nb_conda_kernels_dls

# Server config goes into the env's etc/jupyter so jupyter-server picks it up
COPY jupyter_server_config.json /usr/local/envs/jupyterhub-notebook/etc/jupyter/jupyter_server_config.json

# Startup — COPY does not preserve/guarantee the execute bit, so set it.
COPY entrypoint.bash /usr/local/bin/entrypoint.bash
RUN chmod +x /usr/local/bin/entrypoint.bash
ENTRYPOINT ["/usr/local/bin/entrypoint.bash"]

environment.yml:

# Conda environment for the single-user notebook server.
# Note: channels and dependencies are YAML sequences — each entry needs "- ".
name: jupyterhub-notebook
channels:
  - conda-forge
  - local
dependencies:
  - jupyter=1.0.0
  - jupyterlab=4.3.0
  - jupyterhub=5.2.1
  - python=3.12
  - defusedxml=0.7.1
  - ipyparallel
  - nb_conda_kernels=2.5.1
  - nb_conda_kernels_dls
  - ipympl
  - widgetsnbextension
  - dask
  - distributed
  - jupyterlab-git
  - dask-labextension
  - ipykernel=7.2.0
  - nbconvert=7.16.4

jupyter_server_config.json

{
  "ServerApp": {
    "kernel_spec_manager_class": "nb_conda_kernels_dls.DlsCondaKernelSpecManager"
  },
  "CondaKernelSpecManager": {
    "name_format": "{0} [{1}]"
  }
}

nb_conda_kernels_dls.py

from nb_conda_kernels import CondaKernelSpecManager
from nb_conda_kernels.manager import RUNNER_COMMAND
from os.path import join, split, dirname, basename, abspath
import os
import glob
import json

class DlsCondaKernelSpecManager(CondaKernelSpecManager):
    """A custom CondaKernelSpecManager that uses only the kernels defined
    in the jupyterhub-dls deployment, AND ONLY THOSE: the central DLS conda
    environment plus any environments the user created under ~/.conda/envs.
    """

    def __init__(self, **kwargs):
        # conda_only=True: never fall back to non-conda kernel specs.
        super(DlsCondaKernelSpecManager, self).__init__(conda_only=True, **kwargs)

    def _all_envs(self):
        """Return the environments to expose, as {display name: env path}.

        Overrides the upstream environment discovery: only the environment
        this process runs in (CONDA_PREFIX) and the user's own environments
        under ~/.conda/envs are listed.
        """
        # Raises KeyError if the server was not started from an activated
        # conda environment — NOTE(review): presumably intentional; confirm.
        all_envs = {'DLS Conda': os.environ['CONDA_PREFIX']}

        # Personal environments created by the user.
        user_path = os.path.expanduser('~/.conda/envs/')
        for env in glob.glob(user_path + '*'):
            all_envs['User Conda - ' + basename(env)] = env
        return all_envs

    def _all_specs(self):
        """Find all the kernel specs in all environments.

        Returns a dict with unique kernel names as keys, and the kernel.json
        content as values, modified so that they can be run properly in
        their native environments.
        """
        all_specs = {}
        # We need to be able to find conda-run in the base conda environment
        # even if this package is not running there.
        conda_prefix = self._conda_info['conda_prefix']
        for env_name, env_path in self._all_envs().items():
            kspec_base = join(env_path, 'share', 'jupyter', 'kernels', '*')
            for spec_path in glob.glob(join(kspec_base, '*kernel.json')):
                try:
                    with open(spec_path) as fp:
                        spec = json.load(fp)
                except Exception as err:
                    self.log.error("[nb_conda_kernels] error loading %s:\n%s",
                                   spec_path, err)
                    continue
                # Keep the real (case-preserving) directory: the original
                # lower-cased the *path* and later used it as resource_dir,
                # which points at a non-existent directory on case-sensitive
                # filesystems and makes kernels invisible. Lower-case only
                # the derived kernel *name*.
                kernel_dir = dirname(spec_path)
                if basename(spec_path) == 'kernel.json':
                    raw_kernel_name = basename(kernel_dir).lower()
                else:
                    raw_kernel_name = basename(spec_path)
                kernel_prefix = '' if env_name == 'root' else 'env-'
                kernel_name = u'conda-{}{}-{}'.format(
                    kernel_prefix, env_name, raw_kernel_name)
                # Replace invalid characters with dashes.
                kernel_name = self.clean_kernel_name(kernel_name)
                display_prefix = spec['display_name']
                display_name = self.name_format.format(
                    display_prefix, env_name,
                    conda_kernel=kernel_name,
                    display_name=spec['display_name'],
                    environment=env_name,
                    kernel=raw_kernel_name,
                    language=display_prefix)
                spec['display_name'] = display_name
                # Environment the kernel actually lives in, derived from the
                # interpreter path in argv[0] (renamed from env_path to avoid
                # shadowing the loop variable).
                target_env = dirname(dirname(spec['argv'][0]))
                spec['argv'] = RUNNER_COMMAND + [conda_prefix, target_env] + spec['argv']
                spec['resource_dir'] = abspath(kernel_dir)
                all_specs[kernel_name] = spec
        return all_specs

setup.py

from setuptools import setup

# Packaging for the single-module nb_conda_kernels_dls package.
# The original listing used typographic quotes (‘ ’), which are a SyntaxError.
setup(
    name='nb_conda_kernels_dls',
    version='0.2',
    py_modules=['nb_conda_kernels_dls'],
    description='Custom CondaKernelSpecManager class for JupyterHub',
)

meta.yaml (conda-build requires this exact filename, not meta.yml):

# conda-build recipe for the custom kernel-spec-manager package.
{% set version = "0.2" %}

package:
  name: nb_conda_kernels_dls
  version: "{{ version }}"

source:
  path: ..

build:
  number: 0
  noarch: python
  script: "{{ PYTHON }} -m pip install . --no-deps --ignore-installed --no-cache-dir -vvv"

requirements:
  host:
    - python
    - pip
    - setuptools
  run:
    - python
    - nb_conda_kernels

test:
  imports:
    - nb_conda_kernels_dls

entrypoint.bash

#!/bin/bash
# Container entrypoint: put the extra-envs bin dir on PATH, activate the
# notebook env, then exec the single-user server.
# -u is deliberately omitted: conda's activate scripts reference unset vars.
set -eo pipefail

echo ">>> exporting vars"
# $PYTHON_HOME provides the location of additional envs; only prepend it
# when it is actually set.
export PATH="${PYTHON_HOME:+$PYTHON_HOME/bin:}$PATH"
echo ">>> PATH is"
printf '%s\n' "$PATH"

echo ">>> activating venv"
source activate /usr/local/envs/jupyterhub-notebook
echo ">>> env is:"
env

cd "$HOME"
echo ">>> calling hub"
# exec so the server becomes PID 1 and receives container signals directly.
exec jupyterhub-singleuser --ip='0.0.0.0' --port=8888 --debug

Actually, this occurred while migrating from miniconda to miniforge. I am not able to see the kernels in the notebook.

Am I missing something here?

Thanks