#!/bin/bash

set -e

echo "Checking for updates to spark-template from GitHub"

WORKSPACE_DIR="/workspaces/spark-template"

MARKER_DIR="/var/lib/spark/.versions"
RELEASE_MARKER_FILE="$MARKER_DIR/release"
TOOLS_MARKER_FILE="$MARKER_DIR/tools"

sudo mkdir -p "$MARKER_DIR"

# Fetch the latest release information
LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)

# Check if marker file exists and has the same release ID
RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
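# Defensive sketch (an addition, assuming the API call may fail, rate-limit, or
# return an error payload): abort if no release ID could be parsed.
if [ -z "$RELEASE_ID" ] || [ "$RELEASE_ID" == "null" ]; then
    echo "Could not determine the latest release ID. Aborting update."
    exit 1
fi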
if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
    echo "Already at the latest release. Skipping download."
    exit 0
fi

echo "New version found. Downloading latest release."

TEMP_DIR=$(mktemp -d)
cd "$TEMP_DIR"
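# Optional hardening (an addition): with `set -e`, a failure in any later step
# would leave the temp dir behind; this trap cleans it up on any exit.
trap 'rm -rf "$TEMP_DIR"' EXIT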

DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
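# Defensive sketch (an addition): a release with no assets yields "null" here,
# which would otherwise surface as a confusing curl failure below.
if [ "$DOWNLOAD_URL" == "null" ]; then
    echo "Latest release has no downloadable asset. Aborting update."
    exit 1
fi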
curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"

unzip -o dist.zip
rm dist.zip

# Upgrade the Spark Runtime tools
sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
sudo mv ./spark-sdk-dist/hydrate.sh /usr/local/bin/hydrate.sh
sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
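# proxy.js is used from two locations: keep a copy in the workspace and
# install the original on the PATH.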
sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js

# Upgrade the Spark Tools package
if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
    echo "Already at the latest tools version. Skipping extraction."
else
    tar -xzf ./spark-sdk-dist/spark-tools.tgz

    sudo rm -rf "$WORKSPACE_DIR/packages/spark-tools"
    mkdir -p "$WORKSPACE_DIR/packages/spark-tools"
    sudo mv ./package/* "$WORKSPACE_DIR/packages/spark-tools"
    sudo rmdir ./package

    cd "$WORKSPACE_DIR"
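    # Force npm to reinstall so the refreshed spark-tools package is picked up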
    npm i -f
    cd - > /dev/null

    sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
fi

# Upgrade the GH CLI extension
sudo rm -rf /usr/local/bin/gh-spark-cli
sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
cd /usr/local/bin/gh-spark-cli
# The --force option on `gh extension install` isn't honored for local extensions,
# so remove any existing installation first. A failure here is harmless; it
# usually just means this is the first run.
gh extension remove spark-cli > /dev/null || true
gh extension install .
gh alias set spark spark-cli --clobber
cd - > /dev/null

rm -rf "$TEMP_DIR"

# Update marker file with latest release ID
echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null

echo "Tools installed successfully."