Compare commits


11 Commits

Author               SHA1        Message                                                      Date
Michel Peterson      053af28f2b  Merge 0d928774ce into 55c36d62ff                             2025-05-30 12:36:30 -07:00
Travis Glenn Hansen  55c36d62ff  newer ubuntu image for ci                                    2025-05-30 08:14:54 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  edfdf86f2d  fix for scale with ssh driver                                2025-05-30 07:59:09 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  73af26298c  handling nvme hostid/hostnqn files more robustly             2025-04-06 11:02:21 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  98f99bc761  fix node deps                                                2025-04-05 19:47:08 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  1ed366c545  remove unused deps                                           2025-04-05 19:01:48 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  a032ed3e18  fix registry usage                                           2025-04-05 17:53:59 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  957d7fc6bc  support TN 25.04, env vars in config, improved Dockerfiles  2025-04-05 17:16:52 -06:00
                                 Signed-off-by: Travis Glenn Hansen <travisghansen@yahoo.com>
Travis Glenn Hansen  d05fe2c4f4  Merge remote-tracking branch 'origin/master' into next      2025-03-29 08:29:55 -06:00
Travis Glenn Hansen  8193b689ed  Merge pull request #452 from d-uzlov/fix-global-registry    2025-03-24 15:37:37 -06:00
                                 refactor: stop using global variable for cache registry
Danil Uzlov          4e402645d9  use registry from context                                    2025-02-03 22:06:20 +00:00
27 changed files with 6046 additions and 234 deletions

View File

@@ -20,7 +20,7 @@ jobs:
           access_token: ${{ github.token }}
   build-npm-linux-amd64:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
@@ -115,7 +115,7 @@ jobs:
       SYNOLOGY_PASSWORD: ${{ secrets.SANITY_SYNOLOGY_PASSWORD }}
       SYNOLOGY_VOLUME: ${{ secrets.SANITY_SYNOLOGY_VOLUME }}
-  csi-sanity-truenas-scale-24_10:
+  csi-sanity-truenas-scale-25_04:
     needs:
       - build-npm-linux-amd64
     strategy:
@@ -123,10 +123,10 @@ jobs:
       max-parallel: 1
       matrix:
         config:
-          - truenas/scale/24.10/scale-iscsi.yaml
-          - truenas/scale/24.10/scale-nfs.yaml
+          - truenas/scale/25.04/scale-iscsi.yaml
+          - truenas/scale/25.04/scale-nfs.yaml
           # 80 char limit
-          - truenas/scale/24.10/scale-smb.yaml
+          - truenas/scale/25.04/scale-smb.yaml
     runs-on:
       - self-hosted
       - Linux
@@ -436,7 +436,7 @@ jobs:
       - determine-image-tag
       - csi-sanity-synology-dsm6
       - csi-sanity-synology-dsm7
-      - csi-sanity-truenas-scale-24_10
+      - csi-sanity-truenas-scale-25_04
       - csi-sanity-truenas-core-13_0
       - csi-sanity-zfs-generic
       - csi-sanity-objectivefs
@@ -476,7 +476,7 @@ jobs:
     needs:
       - csi-sanity-synology-dsm6
      - csi-sanity-synology-dsm7
-      - csi-sanity-truenas-scale-24_10
+      - csi-sanity-truenas-scale-25_04
      - csi-sanity-truenas-core-13_0
      - csi-sanity-zfs-generic
      - csi-sanity-objectivefs

View File

@@ -1,3 +1,6 @@
+# docker build --pull -t foobar .
+# docker buildx build --pull -t foobar --platform linux/amd64,linux/arm64,linux/arm/v7,linux/s390x,linux/ppc64le .
 FROM debian:12-slim AS build
 #FROM --platform=$BUILDPLATFORM debian:10-slim AS build
@@ -12,11 +15,11 @@ RUN apt-get update && apt-get install -y locales && rm -rf /var/lib/apt/lists/*
     && localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
 ENV LANG=en_US.utf8
-ENV NODE_VERSION=v20.11.1
+ENV NODE_VERSION=v20.19.0
 ENV NODE_ENV=production
 # install build deps
-RUN apt-get update && apt-get install -y python3 make cmake gcc g++
+#RUN apt-get update && apt-get install -y python3 make cmake gcc g++
 # install node
 RUN apt-get update && apt-get install -y wget xz-utils
@@ -24,6 +27,13 @@ ADD docker/node-installer.sh /usr/local/sbin
 RUN chmod +x /usr/local/sbin/node-installer.sh && node-installer.sh
 ENV PATH=/usr/local/lib/nodejs/bin:$PATH
+# Workaround for https://github.com/nodejs/node/issues/37219
+RUN test $(uname -m) != armv7l || ( \
+    apt-get update \
+    && apt-get install -y libatomic1 \
+    && rm -rf /var/lib/apt/lists/* \
+    )
 # Run as a non-root user
 RUN useradd --create-home csi \
     && mkdir /home/csi/app \
@@ -80,18 +90,26 @@ RUN apt-get update && \
     apt-get install -y wget netbase zip bzip2 socat e2fsprogs exfatprogs xfsprogs btrfs-progs fatresize dosfstools ntfs-3g nfs-common cifs-utils fdisk gdisk cloud-guest-utils sudo rsync procps util-linux nvme-cli fuse3 && \
     rm -rf /var/lib/apt/lists/*
-ARG RCLONE_VERSION=1.66.0
+RUN \
+    echo '83e7a026-2564-455b-ada6-ddbdaf0bc519' > /etc/nvme/hostid && \
+    echo 'nqn.2014-08.org.nvmexpress:uuid:941e4f03-2cd6-435e-86df-731b1c573d86' > /etc/nvme/hostnqn
+ARG RCLONE_VERSION=1.69.1
 ADD docker/rclone-installer.sh /usr/local/sbin
 RUN chmod +x /usr/local/sbin/rclone-installer.sh && rclone-installer.sh
-ARG RESTIC_VERSION=0.16.4
+ARG RESTIC_VERSION=0.18.0
 ADD docker/restic-installer.sh /usr/local/sbin
 RUN chmod +x /usr/local/sbin/restic-installer.sh && restic-installer.sh
-ARG KOPIA_VERSION=0.16.1
+ARG KOPIA_VERSION=0.19.0
 ADD docker/kopia-installer.sh /usr/local/sbin
 RUN chmod +x /usr/local/sbin/kopia-installer.sh && kopia-installer.sh
+ARG YQ_VERSION=v4.45.1
+ADD docker/yq-installer.sh /usr/local/sbin
+RUN chmod +x /usr/local/sbin/yq-installer.sh && yq-installer.sh
 # controller requirements
 #RUN apt-get update && \
 #    apt-get install -y ansible && \

View File

@@ -3,9 +3,11 @@
 # https://github.com/kubernetes/kubernetes/blob/master/test/images/busybox/Dockerfile_windows
 # https://github.com/kubernetes/kubernetes/tree/master/test/images#windows-test-images-considerations
 # https://stefanscherer.github.io/find-dependencies-in-windows-containers/
+# https://stackoverflow.com/questions/65104246/how-to-install-powershell-core-in-aspnet-nanoserver-docker-container
 #
 # docker build --build-arg NANO_BASE_TAG=1809 --build-arg CORE_BASE_TAG=ltsc2019 -t foobar -f Dockerfile.Windows .
 # docker run --rm -ti --entrypoint powershell foobar
+# docker run --rm -ti --entrypoint cmd foobar
 # docker run --rm foobar
 # docker save foobar -o foobar.tar
 # buildah pull docker-archive:foobar.tar
@@ -16,85 +18,96 @@
 ARG NANO_BASE_TAG
 ARG CORE_BASE_TAG
-FROM mcr.microsoft.com/windows/servercore:${CORE_BASE_TAG} as powershell
-# install powershell
-ENV PS_VERSION=6.2.7
-ADD https://github.com/PowerShell/PowerShell/releases/download/v$PS_VERSION/PowerShell-$PS_VERSION-win-x64.zip /PowerShell/powershell.zip
-RUN cd C:\PowerShell &\
-    tar.exe -xf powershell.zip &\
-    del powershell.zip &\
-    mklink powershell.exe pwsh.exe
 FROM mcr.microsoft.com/windows/servercore:${CORE_BASE_TAG} as build
 SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
-#ENV GPG_VERSION 4.0.2
-ENV GPG_VERSION 2.3.4
-RUN Invoke-WebRequest $('https://files.gpg4win.org/gpg4win-vanilla-{0}.exe' -f $env:GPG_VERSION) -OutFile 'gpg4win.exe' -UseBasicParsing ; \
-    Start-Process .\gpg4win.exe -ArgumentList '/S' -NoNewWindow -Wait
-# https://github.com/nodejs/node#release-keys
-RUN @( \
-    '4ED778F539E3634C779C87C6D7062848A1AB005C', \
-    '141F07595B7B3FFE74309A937405533BE57C7D57', \
-    '94AE36675C464D64BAFA68DD7434390BDBE9B9C5', \
-    '74F12602B6F1C4E913FAA37AD3A89613643B6201', \
-    '71DCFD284A79C3B38668286BC97EC7A07EDE3FC1', \
-    '61FC681DFB92A079F1685E77973F295594EC4689', \
-    '8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600', \
-    'C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8', \
-    'C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C', \
-    'DD8F2338BAE7501E3DD5AC78C273792F7D83545D', \
-    'A48C2BEE680E841632CD4E44F07496B3EB3C1762', \
-    '108F52B48DB57BB0CC439B2997B01419BD92F80A', \
-    'B9E2F5981AA6E0CD28160D9FF13993A75599653C' \
-    ) | foreach { \
-    gpg --keyserver hkps://keys.openpgp.org --recv-keys $_ ; \
-    }
-ENV NODE_VERSION 16.18.0
-RUN Invoke-WebRequest $('https://nodejs.org/dist/v{0}/SHASUMS256.txt.asc' -f $env:NODE_VERSION) -OutFile 'SHASUMS256.txt.asc' -UseBasicParsing ;
-#RUN Invoke-WebRequest $('https://nodejs.org/dist/v{0}/SHASUMS256.txt.asc' -f $env:NODE_VERSION) -OutFile 'SHASUMS256.txt.asc' -UseBasicParsing ; \
-#    gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc
-#gpg --verify SHASUMS256.txt.sig SHASUMS256.txt
+ENV POWERSHELL_TELEMETRY_OPTOUT="1"
+ARG PS_VERSION=7.5.0
+ADD https://github.com/PowerShell/PowerShell/releases/download/v$PS_VERSION/PowerShell-$PS_VERSION-win-x64.zip /PowerShell/powershell.zip
+RUN \
+    Expand-Archive '/PowerShell/powershell.zip' -DestinationPath '/PowerShell' ; \
+    cd C:\PowerShell ; \
+    del powershell.zip ; \
+    New-Item -ItemType SymbolicLink -Path "powershell.exe" -Target "pwsh.exe"
+ENV NODE_VERSION 20.19.0
+ENV NODE_ENV=production
 RUN Invoke-WebRequest $('https://nodejs.org/dist/v{0}/node-v{0}-win-x64.zip' -f $env:NODE_VERSION) -OutFile 'node.zip' -UseBasicParsing ; \
-    $sum = $(cat SHASUMS256.txt.asc | sls $(' node-v{0}-win-x64.zip' -f $env:NODE_VERSION)) -Split ' ' ; \
-    if ((Get-FileHash node.zip -Algorithm sha256).Hash -ne $sum[0]) { Write-Error 'SHA256 mismatch' } ; \
     Expand-Archive node.zip -DestinationPath C:\ ; \
     Rename-Item -Path $('C:\node-v{0}-win-x64' -f $env:NODE_VERSION) -NewName 'C:\nodejs'
+RUN mkdir \usr\local\bin; mkdir \tmp
+ARG RCLONE_VERSION=v1.69.1
+RUN Invoke-WebRequest "https://github.com/rclone/rclone/releases/download/${env:RCLONE_VERSION}/rclone-${env:RCLONE_VERSION}-windows-amd64.zip" -OutFile '/tmp/rclone.zip' -UseBasicParsing ; \
+    Expand-Archive C:\tmp\rclone.zip -DestinationPath C:\tmp ; \
+    Copy-Item $('C:\tmp\rclone-{0}-windows-amd64\rclone.exe' -f $env:RCLONE_VERSION) -Destination "C:\usr\local\bin"
+ARG RESTIC_VERSION=0.18.0
+RUN Invoke-WebRequest "https://github.com/restic/restic/releases/download/v${env:RESTIC_VERSION}/restic_${env:RESTIC_VERSION}_windows_amd64.zip" -OutFile '/tmp/restic.zip' -UseBasicParsing ; \
+    Expand-Archive C:\tmp\restic.zip -DestinationPath C:\tmp ; \
+    Copy-Item $('C:\tmp\restic_{0}_windows_amd64.exe' -f $env:RESTIC_VERSION) -Destination "C:\usr\local\bin\restic.exe"
+ARG KOPIA_VERSION=0.19.0
+RUN Invoke-WebRequest "https://github.com/kopia/kopia/releases/download/v${env:KOPIA_VERSION}/kopia-${env:KOPIA_VERSION}-windows-x64.zip" -OutFile '/tmp/kopia.zip' -UseBasicParsing ; \
+    Expand-Archive C:\tmp\kopia.zip -DestinationPath C:\tmp ; \
+    Copy-Item $('C:\tmp\kopia-{0}-windows-x64\kopia.exe' -f $env:KOPIA_VERSION) -Destination "C:\usr\local\bin"
+ARG YQ_VERSION=v4.45.1
+RUN Invoke-WebRequest "https://github.com/mikefarah/yq/releases/download/${env:YQ_VERSION}/yq_windows_amd64.zip" -OutFile '/tmp/yq.zip' -UseBasicParsing ; \
+    Expand-Archive C:\tmp\yq.zip -DestinationPath C:\tmp ; \
+    Copy-Item $('C:\tmp\yq_windows_amd64.exe') -Destination "C:\usr\local\bin\yq.exe"
+RUN Remove-Item C:\tmp\ -Force -Recurse
+# install app
 #RUN setx /M PATH "%PATH%;C:\nodejs"
 RUN setx /M PATH $(${Env:PATH} + \";C:\nodejs\")
 RUN node --version; npm --version;
 RUN mkdir /app
 WORKDIR /app
 COPY package*.json ./
 RUN npm install --only=production; ls /
 COPY . .
+######################
+# actual image
+######################
 FROM mcr.microsoft.com/windows/nanoserver:${NANO_BASE_TAG}
+SHELL ["cmd.exe", "/s" , "/c"]
+#https://github.com/PowerShell/PowerShell-Docker/issues/236
+# NOTE: this works for non-host process containers, but host process containers will have specials PATH requirements
+# C:\Windows\System32\WindowsPowerShell\v1.0\
+ENV PATH="C:\Windows\system32;C:\Windows;C:\PowerShell;C:\app\bin;"
+ENV DEMOCRATIC_CSI_IS_CONTAINER=true
+ENV NODE_ENV=production
 LABEL org.opencontainers.image.source https://github.com/democratic-csi/democratic-csi
 LABEL org.opencontainers.image.url https://github.com/democratic-csi/democratic-csi
 LABEL org.opencontainers.image.licenses MIT
-# if additional dlls are required can copy like this
-#COPY --from=build /Windows/System32/nltest.exe /Windows/System32/nltest.exe
+# install powershell
+COPY --from=build /PowerShell /PowerShell
+# install app
 COPY --from=build /app /app
 WORKDIR /app
+# this works for both host-process and non-host-process container semantics
 COPY --from=build /nodejs/node.exe ./bin
+COPY --from=build /usr/local/bin/ ./bin
-ENTRYPOINT [ "bin/node.exe", "--expose-gc", "bin/democratic-csi" ]
+SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'Continue'; $verbosePreference='Continue';"]
+EXPOSE 50051
+# this works for both host-process and non-host-process container semantics
+#ENTRYPOINT [ "bin/node.exe", "--expose-gc", "bin/democratic-csi" ]
+ADD docker/entrypoint.ps1 ./bin
+# NOTE: this powershell.exe could be problematic based on overriding PATH in container vs host etc
+ENTRYPOINT [ "powershell.exe", "bin/entrypoint.ps1" ]

View File

@@ -7,10 +7,17 @@
 // polyfills
 require("../src/utils/polyfills");
-const yaml = require("js-yaml");
+const cp = require("child_process");
 const fs = require("fs");
 const { grpc } = require("../src/utils/grpc");
-const { stringify, stripWindowsDriveLetter } = require("../src/utils/general");
+const {
+  stringify,
+  stripWindowsDriveLetter,
+  expandenv,
+} = require("../src/utils/general");
+const traverse = require("traverse");
+const uuidv4 = require("uuid").v4;
+const yaml = require("js-yaml");
 let driverConfigFile;
 let options;
@@ -67,6 +74,8 @@ const args = require("yargs")
       "1.7.0",
       "1.8.0",
       "1.9.0",
+      "1.10.0",
+      "1.11.0",
     ],
   })
   .demandOption(["csi-version"], "csi-version is required")
@@ -106,6 +115,20 @@ if (!args.serverSocket && !args.serverAddress && !args.serverPort) {
   process.exit(1);
 }
+//console.log(JSON.stringify(options, null, 2));
+traverse(options).forEach(function (v) {
+  if (typeof v === "string" || v instanceof String) {
+    v = expandenv(v);
+    try {
+      v = JSON.parse(v);
+    } catch (e) {
+      // ignore
+    }
+    this.update(v);
+  }
+});
+//console.log(JSON.stringify(options, null, 2));
+//process.exit(1);
 //console.log(args);
 //console.log(process.env);
@@ -140,10 +163,13 @@ const csi = protoDescriptor.csi.v1;
 logger.info("initializing csi driver: %s", options.driver);
+const { Registry } = require("../src/utils/registry");
+let globalRegistry = new Registry();
 let driver;
 try {
   driver = require("../src/driver/factory").factory(
-    { logger, args, cache, package, csiVersion },
+    { logger, args, cache, package, csiVersion, registry: globalRegistry },
     options
   );
 } catch (err) {
@@ -526,6 +552,28 @@ if (process.env.LOG_GRPC_SESSIONS == "1") {
 if (require.main === module) {
   (async function () {
     try {
+      switch (process.platform) {
+        case "linux":
+          const nvme_dir = "/etc/nvme";
+          // ensure directory
+          if (!fs.existsSync(nvme_dir)) {
+            fs.mkdirSync(nvme_dir);
+          }
+          //uuidgen > /etc/nvme/hostid
+          if (!fs.existsSync(`${nvme_dir}/hostid`)) {
+            fs.writeFileSync(`${nvme_dir}/hostid`, uuidv4() + "\n");
+          }
+          //nvme gen-hostnqn > /etc/nvme/hostnqn
+          if (!fs.existsSync(`${nvme_dir}/hostnqn`)) {
+            const nqn = String(cp.execSync(`nvme gen-hostnqn`));
+            fs.writeFileSync(`${nvme_dir}/hostnqn`, nqn);
+          }
+          break;
+      }
      if (bindAddress) {
        await new Promise((resolve, reject) => {
          csiServer.bindAsync(

csi_proto/csi-v1.10.0.proto (new file, 2103 lines)

File diff suppressed because it is too large.

csi_proto/csi-v1.11.0.proto (new file, 2078 lines)

File diff suppressed because it is too large.

docker/entrypoint.ps1 (new file, 6 lines)
View File

@@ -0,0 +1,6 @@
+write-host "starting democratic-csi via entrypoint.ps1"
+$env:Path = "${pwd}\bin;${env:Path}"
+.\bin\node.exe --expose-gc .\bin\democratic-csi @args
+Exit $LASTEXITCODE

docker/yq-installer.sh (new executable file, 38 lines)
View File

@@ -0,0 +1,38 @@
+#!/bin/bash
+set -e
+set -x
+PLATFORM_TYPE=${1}
+if [[ "${PLATFORM_TYPE}" == "build" ]]; then
+  PLATFORM=$BUILDPLATFORM
+else
+  PLATFORM=$TARGETPLATFORM
+fi
+if [[ "x${PLATFORM}" == "x" ]]; then
+  PLATFORM="linux/amd64"
+fi
+# these come from the --platform option of buildx, indirectly from DOCKER_BUILD_PLATFORM in main.yaml
+if [ "$PLATFORM" = "linux/amd64" ]; then
+  export PLATFORM_ARCH="amd64"
+elif [ "$PLATFORM" = "linux/arm64" ]; then
+  export PLATFORM_ARCH="arm64"
+elif [ "$PLATFORM" = "linux/arm/v7" ]; then
+  export PLATFORM_ARCH="arm"
+elif [ "$PLATFORM" = "linux/s390x" ]; then
+  export PLATFORM_ARCH="s390x"
+elif [ "$PLATFORM" = "linux/ppc64le" ]; then
+  export PLATFORM_ARCH="ppc64le"
+else
+  echo "unsupported/unknown yq PLATFORM ${PLATFORM}"
+  exit 0
+fi
+echo "I am installing yq $YQ_VERSION"
+wget https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_${PLATFORM_ARCH} -O /usr/local/bin/yq
+chmod +x /usr/local/bin/yq

View File

@@ -4,63 +4,67 @@ Some drivers support different settings for volumes. These can be configured via
 classes.
 ## `synology-iscsi`
 The `synology-iscsi` driver supports several storage class parameters. Note however that not all parameters/values are
 supported for all backing file systems and LUN type. The following options are available:
 ### Configure Storage Classes
 ```yaml
 apiVersion: storage.k8s.io/v1
 kind: StorageClass
 metadata:
   name: synology-iscsi
 parameters:
   fsType: ext4
   # The following options affect the LUN representing the volume. These options are passed directly to the Synology API.
   # The following options are known.
   lunTemplate: |
     type: BLUN # Btrfs thin provisioning
     type: BLUN_THICK # Btrfs thick provisioning
     type: THIN # Ext4 thin provisioning
     type: ADV # Ext4 thin provisioning with legacy advanced feature set
     type: FILE # Ext4 thick provisioning
     description: Some Description
     # Only for thick provisioned volumes. Known values:
     # 0: Buffered Writes
     # 3: Direct Write
     direct_io_pattern: 0
     # Device Attributes. See below for more info
     dev_attribs:
       - dev_attrib: emulate_tpws
         enable: 1
       - ...
   # The following options affect the iSCSI target. These options will be passed directly to the Synology API.
   # The following options are known.
   targetTemplate: |
     has_header_checksum: false
     has_data_checksum: false
     # Note that this option requires a compatible filesystem. Use 0 for unlimited sessions.
     max_sessions: 0
     multi_sessions: true
     max_recv_seg_bytes: 262144
     max_send_seg_bytes: 262144
     # Use this to disable authentication. To configure authentication see below
     auth_type: 0
 ```
 #### About LUN Types
 The availability of the different types of LUNs depends on the filesystem used on your Synology volume. For Btrfs volumes
 you can use `BLUN` and `BLUN_THICK` volumes. For Ext4 volumes you can use `THIN`, `ADV` or `FILE` volumes. These
 correspond to the options available in the UI.
 #### About `dev_attribs`
 Most of the LUN options are configured via the `dev_attribs` list. This list can be specified both in the `lunTemplate`
 of the global configuration and in the `lunTemplate` of the `StorageClass`. If both lists are present they will be merged
 (with the `StorageClass` taking precedence). The following `dev_attribs` are known to work:
 - `emulate_tpws`: Hardware-assisted zeroing
 - `emulate_caw`: Hardware-assisted locking
@@ -71,6 +75,7 @@ of the global configuration and in the `lunTemplate` of the `StorageClass`. If b
 - `can_snapshot`: Enable snapshots for this volume. Only works for thin provisioned volumes.
 ### Configure Snapshot Classes
 `synology-iscsi` can also configure different parameters on snapshot classes:
 ```yaml
@@ -87,13 +92,13 @@ parameters:
   # Note that app consistent snapshots require a working Synology Storage Console. Otherwise both values will have
   # equivalent behavior.
   is_app_consistent: true
   ...
 ```
 Note that it is currently not supported by Synology devices to restore a snapshot onto a different volume. You can
 create volumes from snapshots, but you should use the same `StorageClass` as the original volume of the snapshot did.
 ### Enabling CHAP Authentication
 You can enable CHAP Authentication for `StorageClass`es by supplying an appropriate `StorageClass` secret (see the
 [documentation](https://kubernetes-csi.github.io/docs/secrets-and-credentials-storage-class.html) for more details). You
 can use the same password for alle volumes of a `StorageClass` or use different passwords per volume.
@@ -123,12 +128,17 @@ kind: Secret
 metadata:
   name: chap-secret
 stringData:
-  # Client Credentials
-  user: client
-  password: MySecretPassword
-  # Mutual CHAP Credentials. If these are specified mutual CHAP will be enabled.
-  mutualUser: server
-  mutualPassword: MyOtherPassword
+  lunTemplate: |
+    ...
+  targetTemplate: |
+    # Client Credentials
+    user: client
+    password: MySecretPassword
+    # Mutual CHAP Credentials. If these are specified mutual CHAP will be enabled.
+    mutualUser: server
+    mutualPassword: MyOtherPassword
+  lunSnapshotTemplate: |
+    ...
 ```
 Note that CHAP authentication will only be enabled if the secret contains a username and password. If e.g. a password is

package-lock.json (generated, 1551 lines)

File diff suppressed because it is too large.

View File

@@ -34,6 +34,7 @@
     "reconnecting-websocket": "^4.4.0",
     "semver": "^7.3.4",
     "ssh2": "^1.1.0",
+    "traverse": "^0.6.11",
     "uri-js": "^4.4.1",
     "uuid": "^9.0.0",
     "winston": "^3.6.0",

View File

@@ -7,7 +7,6 @@ const fse = require("fs-extra");
 const Kopia = require("../../utils/kopia").Kopia;
 const os = require("os");
 const path = require("path");
-const registry = require("../../utils/registry");
 const Restic = require("../../utils/restic").Restic;
 const semver = require("semver");
@@ -458,7 +457,7 @@ class ControllerClientCommonDriver extends CsiBaseDriver {
   async getResticClient() {
     const driver = this;
-    return registry.get(`${__REGISTRY_NS__}:restic`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:restic`, () => {
       const config_key = driver.getConfigKey();
       const restic_env = _.get(
@@ -491,7 +490,7 @@ class ControllerClientCommonDriver extends CsiBaseDriver {
   async getKopiaClient() {
     const driver = this;
-    return registry.getAsync(`${__REGISTRY_NS__}:kopia`, async () => {
+    return this.ctx.registry.getAsync(`${__REGISTRY_NS__}:kopia`, async () => {
       const config_key = driver.getConfigKey();
       const kopia_env = _.get(

View File

@@ -3,7 +3,6 @@ const { CsiBaseDriver } = require("../index");
 const { GrpcError, grpc } = require("../../utils/grpc");
 const GeneralUtils = require("../../utils/general");
 const { ObjectiveFS } = require("../../utils/objectivefs");
-const registry = require("../../utils/registry");
 const semver = require("semver");
 const uuidv4 = require("uuid").v4;
@@ -105,7 +104,7 @@ class ControllerObjectiveFSDriver extends CsiBaseDriver {
   async getObjectiveFSClient() {
     const driver = this;
-    return registry.getAsync(
+    return this.ctx.registry.getAsync(
       `${__REGISTRY_NS__}:objectivefsclient`,
       async () => {
         const options = {};

View File

@@ -3,8 +3,8 @@ const http = require("http");
 const https = require("https");
 const { axios_request, stringify } = require("../../../utils/general");
 const Mutex = require("async-mutex").Mutex;
-const registry = require("../../../utils/registry");
 const { GrpcError, grpc } = require("../../../utils/grpc");
+const { Registry } = require("../../../utils/registry");
 const USER_AGENT = "democratic-csi";
 const __REGISTRY_NS__ = "SynologyHttpClient";
@@ -85,6 +85,7 @@ class SynologyHttpClient {
     this.logger = console;
     this.doLoginMutex = new Mutex();
     this.apiSerializeMutex = new Mutex();
+    this.registry = new Registry();
     if (false) {
       setInterval(() => {
@@ -95,7 +96,7 @@ class SynologyHttpClient {
   }
   getHttpAgent() {
-    return registry.get(`${__REGISTRY_NS__}:http_agent`, () => {
+    return this.registry.get(`${__REGISTRY_NS__}:http_agent`, () => {
       return new http.Agent({
         keepAlive: true,
         maxSockets: Infinity,
@@ -105,7 +106,7 @@ class SynologyHttpClient {
   }
   getHttpsAgent() {
-    return registry.get(`${__REGISTRY_NS__}:https_agent`, () => {
+    return this.registry.get(`${__REGISTRY_NS__}:https_agent`, () => {
       return new https.Agent({
         keepAlive: true,
         maxSockets: Infinity,

View File

@@ -3,7 +3,6 @@ const { CsiBaseDriver } = require("../index");
 const GeneralUtils = require("../../utils/general");
 const { GrpcError, grpc } = require("../../utils/grpc");
 const Handlebars = require("handlebars");
-const registry = require("../../utils/registry");
 const SynologyHttpClient = require("./http").SynologyHttpClient;
 const semver = require("semver");
 const yaml = require("js-yaml");
@@ -115,7 +114,7 @@ class ControllerSynologyDriver extends CsiBaseDriver {
   }
   async getHttpClient() {
-    return registry.get(`${__REGISTRY_NS__}:http_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:http_client`, () => {
       return new SynologyHttpClient(this.options.httpConnection);
     });
   }

View File

@@ -2,7 +2,6 @@ const _ = require("lodash");
 const { ControllerZfsBaseDriver } = require("../controller-zfs");
 const { GrpcError, grpc } = require("../../utils/grpc");
 const GeneralUtils = require("../../utils/general");
-const registry = require("../../utils/registry");
 const LocalCliExecClient =
   require("../../utils/zfs_local_exec_client").LocalCliClient;
 const SshClient = require("../../utils/zfs_ssh_exec_client").SshClient;
@@ -15,7 +14,7 @@ const NVMEOF_ASSETS_NAME_PROPERTY_NAME = "democratic-csi:nvmeof_assets_name";
 const __REGISTRY_NS__ = "ControllerZfsGenericDriver";
 class ControllerZfsGenericDriver extends ControllerZfsBaseDriver {
   getExecClient() {
-    return registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
       if (this.options.sshConnection) {
         return new SshClient({
           logger: this.ctx.logger,
@@ -30,7 +29,7 @@ class ControllerZfsGenericDriver extends ControllerZfsBaseDriver {
   }
   async getZetabyte() {
-    return registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
+    return this.ctx.registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
       const execClient = this.getExecClient();
       const options = {};
       if (this.options.sshConnection) {

View File

@@ -4,7 +4,6 @@ const { GrpcError, grpc } = require("../../utils/grpc");
 const GeneralUtils = require("../../utils/general");
 const LocalCliExecClient =
   require("../../utils/zfs_local_exec_client").LocalCliClient;
-const registry = require("../../utils/registry");
 const { Zetabyte } = require("../../utils/zfs");
 const ZFS_ASSET_NAME_PROPERTY_NAME = "zfs_asset_name";
@@ -33,7 +32,7 @@ class ControllerZfsLocalDriver extends ControllerZfsBaseDriver {
   }
   getExecClient() {
-    return registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
       return new LocalCliExecClient({
         logger: this.ctx.logger,
       });
@@ -41,7 +40,7 @@ class ControllerZfsLocalDriver extends ControllerZfsBaseDriver {
   }
   async getZetabyte() {
-    return registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
+    return this.ctx.registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
       const execClient = this.getExecClient();
       const options = {};

View File

@@ -4,7 +4,6 @@ const { CsiBaseDriver } = require("../index");
 const HttpClient = require("./http").Client;
 const TrueNASApiClient = require("./http/api").Api;
 const { Zetabyte } = require("../../utils/zfs");
-const registry = require("../../utils/registry");
 const GeneralUtils = require("../../utils/general");
 const Handlebars = require("handlebars");
@@ -156,7 +155,7 @@ class FreeNASApiDriver extends CsiBaseDriver {
    * @returns
    */
   async getZetabyte() {
-    return registry.get(`${__REGISTRY_NS__}:zb`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:zb`, () => {
       return new Zetabyte({
         executor: {
           spawn: function () {
@@ -1799,8 +1798,13 @@ class FreeNASApiDriver extends CsiBaseDriver {
   async removeSnapshotsFromDatatset(datasetName) {
     const httpApiClient = await this.getTrueNASHttpApiClient();
+    // const httpClient = await this.getHttpClient();
+    // const major = await httpApiClient.getSystemVersionMajor();
     let job_id = await httpApiClient.DatasetDestroySnapshots(datasetName);
-    await httpApiClient.CoreWaitForJob(job_id, 30);
+    if (job_id) {
+      await httpApiClient.CoreWaitForJob(job_id, 30);
+    }
   }
   /**
@@ -2017,7 +2021,7 @@ class FreeNASApiDriver extends CsiBaseDriver {
   }
   async getHttpClient() {
-    return registry.get(`${__REGISTRY_NS__}:http_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:http_client`, () => {
       const client = new HttpClient(this.options.httpConnection);
       client.logger = this.ctx.logger;
       client.setApiVersion(2); // requires version 2
@@ -2034,10 +2038,13 @@ class FreeNASApiDriver extends CsiBaseDriver {
   }
   async getTrueNASHttpApiClient() {
-    return registry.getAsync(`${__REGISTRY_NS__}:api_client`, async () => {
-      const httpClient = await this.getHttpClient();
-      return new TrueNASApiClient(httpClient, this.ctx.cache);
-    });
+    return this.ctx.registry.getAsync(
+      `${__REGISTRY_NS__}:api_client`,
+      async () => {
+        const httpClient = await this.getHttpClient();
+        return new TrueNASApiClient(httpClient, this.ctx.cache);
+      }
+    );
   }
   getAccessModes(capability) {

View File

@@ -1,6 +1,7 @@
-const registry = require("../../../utils/registry");
+const _ = require("lodash");
 const { sleep, stringify } = require("../../../utils/general");
 const { Zetabyte } = require("../../../utils/zfs");
+const { Registry } = require("../../../utils/registry");
 // used for in-memory cache of the version info
 const FREENAS_SYSTEM_VERSION_CACHE_KEY = "freenas:system_version";
@@ -11,6 +12,7 @@ class Api {
     this.client = client;
     this.cache = cache;
     this.options = options;
+    this.registry = new Registry();
   }
   async getHttpClient() {
@@ -22,7 +24,7 @@ class Api {
    * @returns
    */
   async getZetabyte() {
-    return registry.get(`${__REGISTRY_NS__}:zb`, () => {
+    return this.registry.get(`${__REGISTRY_NS__}:zb`, () => {
       return new Zetabyte({
         executor: {
           spawn: function () {
@@ -422,13 +424,13 @@ class Api {
    * @param {*} properties
    * @returns
    */
-  async DatasetGet(datasetName, properties) {
+  async DatasetGet(datasetName, properties, queryParams = {}) {
     const httpClient = await this.getHttpClient(false);
     let response;
     let endpoint;
     endpoint = `/pool/dataset/id/${encodeURIComponent(datasetName)}`;
-    response = await httpClient.get(endpoint);
+    response = await httpClient.get(endpoint, queryParams);
     if (response.statusCode == 200) {
       return this.normalizeProperties(response.body, properties);
@@ -441,28 +443,60 @@ class Api {
     throw new Error(JSON.stringify(response.body));
   }
+  /**
+   * This is meant to destroy all snapshots on the given dataset
+   *
+   * @param {*} datasetName
+   * @param {*} data
+   * @returns
+   */
   async DatasetDestroySnapshots(datasetName, data = {}) {
     const httpClient = await this.getHttpClient(false);
     let response;
     let endpoint;
-    data.name = datasetName;
-    endpoint = "/pool/dataset/destroy_snapshots";
-    response = await httpClient.post(endpoint, data);
-    if (response.statusCode == 200) {
-      return response.body;
-    }
-    if (
-      response.statusCode == 422 &&
-      JSON.stringify(response.body).includes("already exists")
-    ) {
-      return;
-    }
-    throw new Error(JSON.stringify(response.body));
+    const major = await this.getSystemVersionMajor();
+    if (Number(major) >= 25) {
+      try {
+        response = await this.DatasetGet(
+          datasetName,
+          ["id", "type", "name", "pool", "snapshots"],
+          {
+            "extra.snapshots": "true",
+            "extra.retrieve_children": "false",
+          }
+        );
+        for (const snapshot of _.get(response, "snapshots", [])) {
+          await this.SnapshotDelete(snapshot.name, {
+            defer: true,
+          });
+        }
+      } catch (err) {
+        if (err.toString().includes("dataset does not exist")) {
+          return;
+        }
+        throw err;
+      }
+    } else {
+      data.name = datasetName;
+      endpoint = "/pool/dataset/destroy_snapshots";
+      response = await httpClient.post(endpoint, data);
+      if (response.statusCode == 200) {
+        return response.body;
+      }
+      if (
+        response.statusCode == 422 &&
+        JSON.stringify(response.body).includes("already exists")
+      ) {
+        return;
+      }
+      throw new Error(JSON.stringify(response.body));
+    }
   }
   async SnapshotSet(snapshotName, properties) {

View File

@@ -1,7 +1,6 @@
 const _ = require("lodash");
 const { ControllerZfsBaseDriver } = require("../controller-zfs");
 const { GrpcError, grpc } = require("../../utils/grpc");
-const registry = require("../../utils/registry");
 const SshClient = require("../../utils/zfs_ssh_exec_client").SshClient;
 const HttpClient = require("./http").Client;
 const TrueNASApiClient = require("./http/api").Api;
@@ -57,7 +56,7 @@ class FreeNASSshDriver extends ControllerZfsBaseDriver {
   }
   getExecClient() {
-    return registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:exec_client`, () => {
       return new SshClient({
         logger: this.ctx.logger,
         connection: this.options.sshConnection,
@@ -66,7 +65,7 @@ class FreeNASSshDriver extends ControllerZfsBaseDriver {
   }
   async getZetabyte() {
-    return registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
+    return this.ctx.registry.getAsync(`${__REGISTRY_NS__}:zb`, async () => {
       const sshClient = this.getExecClient();
       const options = {};
       options.executor = new ZfsSshProcessManager(sshClient);
@@ -121,12 +120,21 @@ class FreeNASSshDriver extends ControllerZfsBaseDriver {
           chroot: "/usr/sbin/chroot",
         };
       }
+      if (isScale && Number(majorMinor) >= 25) {
+        options.paths = {
+          zfs: "/usr/sbin/zfs",
+          zpool: "/usr/sbin/zpool",
+          sudo: "/usr/bin/sudo",
+          chroot: "/usr/sbin/chroot",
+        };
+      }
     }
   }
   async getHttpClient(autoDetectVersion = true) {
     const autodetectkey = autoDetectVersion === true ? 1 : 0;
-    return registry.getAsync(
+    return this.ctx.registry.getAsync(
       `${__REGISTRY_NS__}:http_client:autoDetectVersion_${autodetectkey}`,
       async () => {
         const client = new HttpClient(this.options.httpConnection);
@@ -143,10 +151,13 @@ class FreeNASSshDriver extends ControllerZfsBaseDriver {
   }
   async getTrueNASHttpApiClient() {
-    return registry.getAsync(`${__REGISTRY_NS__}:api_client`, async () => {
-      const httpClient = await this.getHttpClient();
-      return new TrueNASApiClient(httpClient, this.ctx.cache);
-    });
+    return this.ctx.registry.getAsync(
+      `${__REGISTRY_NS__}:api_client`,
+      async () => {
+        const httpClient = await this.getHttpClient();
+        return new TrueNASApiClient(httpClient, this.ctx.cache);
+      }
+    );
   }
   getDriverShareType() {

View File

@@ -12,7 +12,6 @@ const { OneClient } = require("../utils/oneclient");
 const { Filesystem } = require("../utils/filesystem");
 const { ISCSI } = require("../utils/iscsi");
 const { NVMEoF } = require("../utils/nvmeof");
-const registry = require("../utils/registry");
 const semver = require("semver");
 const GeneralUtils = require("../utils/general");
 const { Zetabyte } = require("../utils/zfs");
@@ -111,7 +110,7 @@ class CsiBaseDriver {
    * @returns Filesystem
    */
   getDefaultFilesystemInstance() {
-    return registry.get(
+    return this.ctx.registry.get(
       `${__REGISTRY_NS__}:default_filesystem_instance`,
       () => {
         return new Filesystem();
@@ -125,7 +124,7 @@ class CsiBaseDriver {
    * @returns Mount
    */
   getDefaultMountInstance() {
-    return registry.get(`${__REGISTRY_NS__}:default_mount_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_mount_instance`, () => {
       const filesystem = this.getDefaultFilesystemInstance();
       return new Mount({ filesystem });
     });
@@ -137,7 +136,7 @@ class CsiBaseDriver {
    * @returns ISCSI
    */
   getDefaultISCSIInstance() {
-    return registry.get(`${__REGISTRY_NS__}:default_iscsi_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_iscsi_instance`, () => {
       return new ISCSI();
     });
   }
@@ -149,13 +148,13 @@ class CsiBaseDriver {
    */
   getDefaultNVMEoFInstance() {
     const driver = this;
-    return registry.get(`${__REGISTRY_NS__}:default_nvmeof_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_nvmeof_instance`, () => {
       return new NVMEoF({ logger: driver.ctx.logger });
     });
   }
   getDefaultZetabyteInstance() {
-    return registry.get(`${__REGISTRY_NS__}:default_zb_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_zb_instance`, () => {
       return new Zetabyte({
         idempotent: true,
         paths: {
@@ -177,14 +176,14 @@ class CsiBaseDriver {
   }
   getDefaultOneClientInstance() {
-    return registry.get(`${__REGISTRY_NS__}:default_oneclient_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_oneclient_instance`, () => {
       return new OneClient();
     });
   }
   getDefaultObjectiveFSInstance() {
     const driver = this;
-    return registry.get(
+    return this.ctx.registry.get(
       `${__REGISTRY_NS__}:default_objectivefs_instance`,
       () => {
         return new ObjectiveFS({
@@ -199,7 +198,7 @@ class CsiBaseDriver {
    * @returns CsiProxyClient
    */
   getDefaultCsiProxyClientInstance() {
-    return registry.get(`${__REGISTRY_NS__}:default_csi_proxy_instance`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:default_csi_proxy_instance`, () => {
       const options = {};
       options.services = _.get(this.options, "node.csiProxy.services", {});
       return new CsiProxyClient(options);
@@ -207,7 +206,7 @@ class CsiBaseDriver {
   }
   getDefaultKubernetsConfigInstance() {
-    return registry.get(
+    return this.ctx.registry.get(
       `${__REGISTRY_NS__}:default_kubernetes_config_instance`,
       () => {
         const kc = new k8s.KubeConfig();

View File

@@ -2,7 +2,6 @@ const fs = require("fs");
 const { CsiBaseDriver } = require("../index");
 const { GrpcError, grpc } = require("../../utils/grpc");
 const { Filesystem } = require("../../utils/filesystem");
-const registry = require("../../utils/registry");
 const semver = require("semver");
 const SshClient = require("../../utils/zfs_ssh_exec_client").SshClient;
 const { Zetabyte, ZfsSshProcessManager } = require("../../utils/zfs");
@@ -125,7 +124,7 @@ class ZfsLocalEphemeralInlineDriver extends CsiBaseDriver {
   }
   getSshClient() {
-    return registry.get(`${__REGISTRY_NS__}:ssh_client`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:ssh_client`, () => {
      return new SshClient({
        logger: this.ctx.logger,
        connection: this.options.sshConnection,
@@ -134,7 +133,7 @@ class ZfsLocalEphemeralInlineDriver extends CsiBaseDriver {
   }
   getZetabyte() {
-    return registry.get(`${__REGISTRY_NS__}:zb`, () => {
+    return this.ctx.registry.get(`${__REGISTRY_NS__}:zb`, () => {
       let sshClient;
       let executor;
       if (this.options.sshConnection) {

View File

@@ -272,6 +272,19 @@ async function hostname_lookup(hostname) {
   });
 }
+function expandenv(string, env) {
+  if (!(typeof string === "string" || string instanceof String)) {
+    throw new Error("Please pass a string into expandenv");
+  }
+  env = env ? env : process.env;
+  return string.replace(/\$\{?[a-zA-Z_]+[a-zA-Z0-9_]*\}?/g, function (match) {
+    match = match.replace(/[^A-Za-z0-9_]/g, "");
+    return env[match] || "";
+  });
+}
 module.exports.sleep = sleep;
 module.exports.md5 = md5;
 module.exports.crc32 = crc32;
@@ -292,3 +305,4 @@ module.exports.default_supported_file_filesystems =
 module.exports.retry = retry;
 module.exports.trimchar = trimchar;
 module.exports.hostname_lookup = hostname_lookup;
+module.exports.expandenv = expandenv;
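
The `expandenv` helper added here is what powers the "env vars in config" change from commit 957d7fc6bc: at startup `bin/democratic-csi` walks every string value of the loaded config with `traverse` and passes it through `expandenv` (see the earlier hunk). A minimal sketch of the substitution behavior, using purely illustrative variable names and values:

```js
// Sketch only: illustrates how expandenv-style substitution behaves.
// SSH_HOST and MISSING are hypothetical variables, not part of any shipped config.
const { expandenv } = require("./src/utils/general");

process.env.SSH_HOST = "truenas.example.com";

console.log(expandenv("${SSH_HOST}"));  // "truenas.example.com"
console.log(expandenv("$SSH_HOST:22")); // "truenas.example.com:22"
console.log(expandenv("${MISSING}"));   // "" (unset variables expand to an empty string)
```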

View File

@@ -48,6 +48,4 @@ class Registry {
   }
 }
-const registry = new Registry();
-module.exports = registry;
+module.exports.Registry = Registry;
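
This hunk is the core of the `fix-global-registry` refactor: `src/utils/registry.js` now exports the `Registry` class rather than a shared singleton, `bin/democratic-csi` constructs one instance per process, and drivers reach it through `this.ctx.registry`, as the driver hunks above show. A rough sketch of the memoization pattern, with an illustrative key and factory:

```js
// Sketch only: Registry behaves as a per-context cache keyed by "namespace:name".
const http = require("http");
const { Registry } = require("./src/utils/registry");

const registry = new Registry();

function getHttpAgent() {
  // The factory callback runs on the first call; later calls return the cached value.
  return registry.get("ExampleDriver:http_agent", () => {
    return new http.Agent({ keepAlive: true });
  });
}

console.log(getHttpAgent() === getHttpAgent()); // true: same cached agent instance
```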