Dockerfile for Posit Workbench in GCP
# Base image that includes Posit session components
FROM rstudio/workbench-session:ubuntu2204-r4.4.3_4.3.3-py3.12.11_3.11.13
# External build script passes in these values, either parsed out of the image name above or defined externally
# to manually specify a different version to use.
ARG PYTHON_VERSION
ARG R_VERSION
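# For reference, a minimal sketch of how an external build script might supply these ARGs (the tag name and
# version values below are illustrative assumptions taken from the base image name, not our actual build command):
#   docker build \
#     --build-arg R_VERSION=4.4.3 \
#     --build-arg PYTHON_VERSION=3.11.13 \
#     -t workbench-session-gcp:latest .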
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Install requirements for later via apt-get, some of this is probably redundant...
# and yes, "OMG YER UPGRADIN' IT ALL!!!" -- yep, if they installed it, I'ma patch it.
RUN apt-get update && \
    apt-get upgrade -y && \
    apt-get install -y gdebi-core curl apt-transport-https gnupg git python3-dev g++ unzip zip ca-certificates && \
    # Neither Google's instructions nor GitHub's instructions get this clean and clear...
    # - GitHub's keyring file is in the right format (an OpenPGP Public Key Version 4) but their instructions put it
    #   in /etc/apt/keyrings/ or something
    # - Google's keyring is just a PGP public key block, so you need to convert it. Google also then drops the file
    #   in the wrong spot - /usr/local/keyrings/ or something...
    # These keyrings need to go in /etc/apt/trusted.gpg.d/ for apt-get to pick them up correctly
    # (at least this was what we needed in our GCP/k8s/Ubuntu 2204 and 2404 servers...)
    #
    # So save to the right folder:
    curl -o /etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg https://cli.github.com/packages/githubcli-archive-keyring.gpg && \
    # and convert, then save to the right folder:
    curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /etc/apt/trusted.gpg.d/cloud.google.gpg add - && \
    # then add the apt repo entries, also pointing to the right folder (which apt didn't seem to see in the former locations)
    echo "deb [signed-by=/etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee -a /etc/apt/sources.list.d/github-cli.list && \
    echo "deb [signed-by=/etc/apt/trusted.gpg.d/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \
    # then go grab a fresh apt list and install these last two packages and requirements
    apt-get update && \
    apt-get install -y gh google-cloud-sdk
# Install GoDaddy Root Certs for cert validation of our Connect server.
# Why these aren't already included/distributed is beyond me...
RUN curl -o /usr/local/share/ca-certificates/gd_bundle-g2-g1.crt https://certs.godaddy.com/repository/gd_bundle-g2-g1.crt && \
    update-ca-certificates
# Install Bazelisk (instead of Bazel) for one of the data science team package requirements. Versions of bazel
# get managed by bazelisk in the individual project.
RUN curl -L https://github.com/bazelbuild/bazelisk/releases/latest/download/bazelisk-linux-amd64 \
    -o /usr/local/bin/bazel && \
    chmod +x /usr/local/bin/bazel
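# For example, each project can pin its own Bazel release and bazelisk will fetch it on first use
# (hypothetical project; the version number is just an illustration):
#   echo "7.4.1" > .bazelversion
#   bazel version   # bazelisk downloads and runs Bazel 7.4.1 for this workspace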
# Install R requirements -- this is very site-dependent, and takes a long time to run,
# but speeds things up in use. Other version crap is taken care of with renv.
RUN /opt/R/${R_VERSION}/bin/R -e 'install.packages(c("gt","dbplyr","languageserver","shinydashboard", "renv"), \
    dependencies=TRUE, repos="https://packagemanager.rstudio.com/cran/__linux__/jammy/latest")'
# Update all installed python packages - this one is also just "it's here, so patch it" hygiene for the three
# local (that is, non-Ubuntu/OS-managed) installs. Other version crap is covered by uv/venv.
RUN outdated=$(/opt/python/jupyter/bin/pip list --outdated | awk 'NR>2 {print $1}') && \
    # capture the list in a variable first, so the build still succeeds when there are zero outdated packages
    if [ -n "$outdated" ] ; then \
        echo "$outdated" | xargs -n1 /opt/python/jupyter/bin/pip install -U ; fi && \
    outdated=$(/opt/python/3.11.13/bin/pip list --outdated | awk 'NR>2 {print $1}') && \
    if [ -n "$outdated" ] ; then \
        echo "$outdated" | xargs -n1 /opt/python/3.11.13/bin/pip install -U ; fi && \
    outdated=$(/opt/python/default/bin/pip list --outdated | awk 'NR>2 {print $1}') && \
    if [ -n "$outdated" ] ; then \
        echo "$outdated" | xargs -n1 /opt/python/default/bin/pip install -U ; fi
# Install Python requirements - again, very specific to us. uv is the only really important one here, since we
# can't bootstrap our project packages without it...
# We may need to drop poetry in here too, but so far that hasn't been needed.
RUN pip install --upgrade --no-cache-dir \
    pip \
    uv \
    google-cloud-bigquery \
    google-cloud-storage \
    bigquery \
    # we need keyring to try and grab python packages from our private GCP artifact repository (GAR)
    keyring \
    keyrings.google-artifactregistry-auth && \
    # these cleanup commands are probably redundant with the --no-cache-dir option set above, but they're cheap
    find /opt/python -name '*.pyc' -delete \
    && find /opt/python -name '__pycache__' -type d -exec rm -r {} +
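# With keyring and keyrings.google-artifactregistry-auth installed, pip can authenticate to GAR transparently
# using the ambient GCP credentials. A rough sketch of what a session might then run (the region/project/repo
# names are made-up placeholders, not our actual repository):
#   pip install --index-url https://us-central1-python.pkg.dev/my-project/my-pypi-repo/simple/ some-internal-pkg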
# Copy in the externally defined extension configs for vscode and positron
COPY vscode.extensions.conf positron.extensions.conf /etc/rstudio/
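# Those two conf files live outside this gist; as an assumption based on how Workbench pre-installs session
# extensions, each is just a list of extension IDs, one per line -- e.g. a vscode.extensions.conf might contain:
#   quarto.quarto
#   ms-python.python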
# Clean up unnecessary files to reduce image size -- I'm not sure this is actually needed, will check next image refresh
RUN rm -rf ~/.cache /tmp/*
# So internal clients can talk to the Workbench launcher
EXPOSE 8788/tcp
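# Once built and pushed, the image gets referenced from the Workbench launcher config so sessions can run in it.
# A rough sketch, assuming Kubernetes and the stock profiles file (the registry path is a made-up placeholder,
# not our actual project):
#   # /etc/rstudio/launcher.kubernetes.profiles.conf
#   [*]
#   container-images=us-central1-docker.pkg.dev/my-project/my-repo/workbench-session-gcp:latest
#   default-container-image=us-central1-docker.pkg.dev/my-project/my-repo/workbench-session-gcp:latest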