Skip to content

Commit 898d05a

Browse files
committed
Initial commit
0 parents  commit 898d05a

75 files changed

Lines changed: 14200 additions & 0 deletions

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

.devcontainer/devcontainer.json

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
{
  // Display name shown in the Codespaces / Dev Containers UI.
  "name": "Workbench template",
  // Base image: Node 22 on Debian Bookworm with TypeScript tooling preinstalled.
  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bookworm",
  // Minimum machine size requested for the Codespace.
  "hostRequirements": {
    "storage": "32gb",
    "memory": "8gb",
    "cpus": 4
  },
  // Runs automatically after Prebuild Template environment is created.
  "onCreateCommand": ".devcontainer/onCreate.sh",
  // Runs automatically every time the dev container has been (re-)started
  "postStartCommand": ".devcontainer/postStartCommand.sh",
  "customizations": {
    "codespaces": {
      // Deploys a preview revision when the Codespace is stopped.
      "onStopCommand": "REVISION_NAME=spark-preview /usr/local/bin/deploy.sh"
    }
  },
  // Forwarded ports. 13000 is the spark-file-syncer port (see spark.conf);
  // presumably 4000/5000/9000 are the dev server / proxy / designer — TODO confirm.
  "forwardPorts": [
    4000,
    5000,
    9000,
    13000
  ],
  // Extra dev-container features: sshd enables SSH access into the container.
  "features": {
    "ghcr.io/devcontainers/features/sshd:1": {
      "version": "latest"
    }
  }
}

.devcontainer/onCreate.sh

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
#!/bin/bash
# onCreate.sh — one-time Codespace setup, run as the devcontainer
# onCreateCommand: installs the GitHub CLI, azcopy, the Spark tooling
# (via refreshTools.sh), pre-builds optimized assets, and installs supervisor.

set -e

echo "Installing the GitHub CLI"
# Official gh apt install recipe: add GitHub's keyring + apt source, then install.
# inotify-tools is installed alongside for file-watching support.
# (Consistently use apt-get: it is the stable scripting interface.)
(type -p wget >/dev/null || (sudo apt-get update && sudo apt-get install wget -y)) \
  && sudo mkdir -p -m 755 /etc/apt/keyrings \
  && out=$(mktemp) && wget -nv -O "$out" https://cli.github.com/packages/githubcli-archive-keyring.gpg \
  && sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg < "$out" > /dev/null \
  && rm -f -- "$out" \
  && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
  && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
  && sudo apt-get update \
  && sudo apt-get install gh inotify-tools -y

echo "Installing azcopy"

# Download the azcopy tarball and extract the binary into PATH.
sudo wget -O /usr/local/bin/azcopytar https://aka.ms/downloadazcopy-v10-linux
sudo tar -xvf /usr/local/bin/azcopytar -C /usr/local/bin/
sudo rm /usr/local/bin/azcopytar
# The tarball extracts into a versioned directory (e.g. azcopy_linux_amd64_x.y.z);
# pull the binary out and discard the directory.
azcopy_dir=$(find /usr/local/bin/ -type d -name "azcopy*" | head -n 1)
sudo mv "$azcopy_dir/azcopy" /usr/local/bin/azcopy
sudo rm -rf "$azcopy_dir"

# Install/refresh the Spark runtime tools from the latest release.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
"$SCRIPT_DIR/refreshTools.sh"

echo "Pre-starting the server and generating the optimized assets"
# NOTE(review): "--override" is not a documented npm flag — confirm it is
# intentional (it may be meant for the "optimize" script itself, which would
# require a "--" separator).
npm run optimize --override

echo "Installing supervisor"
sudo apt-get update && sudo apt-get install -y supervisor

.devcontainer/postStartCommand.sh

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#!/bin/bash
# postStartCommand.sh — runs on every (re)start of the dev container:
# refreshes the Spark tooling, optionally hydrates the workspace from a
# snapshot, and brings up the supervised services.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
"$SCRIPT_DIR/refreshTools.sh"

# If SNAPSHOT_SAS_URL was provided, hydrate the workspace from that snapshot.
# (This runs in the foreground, so hydration completes before services start.)
if [ -n "${SNAPSHOT_SAS_URL:-}" ]; then
  WORKSPACE_DIR="/workspaces/spark-template"
  SAS_URI="$SNAPSHOT_SAS_URL" /usr/local/bin/hydrate.sh "$WORKSPACE_DIR"
fi

# Install the supervisor program definitions for the Spark services.
sudo cp .devcontainer/spark.conf /etc/supervisor/conf.d/

# Let the 'node' user own the pid/log directories supervisord writes to.
sudo chown node /var/run/
sudo chown -R node /var/log/

# Start supervisord, then (re)load the service definitions so new or
# changed programs from spark.conf are picked up.
supervisord
supervisorctl reread
supervisorctl update

.devcontainer/refreshTools.sh

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
#!/bin/bash
# refreshTools.sh — checks GitHub for a newer release of the Spark tooling
# and, when one is found, downloads and installs:
#   * the Spark runtime binaries/scripts under /usr/local/bin
#   * the spark-tools npm package under $WORKSPACE_DIR/packages
#   * the "spark" gh CLI extension
# Installed release/tool versions are recorded in marker files so repeat
# runs are cheap no-ops.
# Requires: TEMPLATE_PAT — GitHub token with access to github/spark-template.

set -e

echo "Checking for updates to workbench-template from GitHub"

WORKSPACE_DIR="/workspaces/spark-template"

MARKER_DIR="/var/lib/spark/.versions"
RELEASE_MARKER_FILE="$MARKER_DIR/release"
TOOLS_MARKER_FILE="$MARKER_DIR/tools"

sudo mkdir -p "$MARKER_DIR"

# Fetch the latest release metadata. -f makes curl fail on HTTP errors
# instead of handing an error JSON body to jq (which would yield a null
# release ID and a bogus download attempt); -sS stays quiet but still
# reports errors.
LATEST_RELEASE=$(curl -fsS -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)

# Skip everything if the marker file already records this release ID.
RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
  echo "Already at the latest release. Skipping download."
  exit 0
fi

echo "New version found. Downloading latest release."

TEMP_DIR=$(mktemp -d)
# Remove the scratch directory on any exit path (success, error, or signal).
trap 'rm -rf -- "$TEMP_DIR"' EXIT
cd "$TEMP_DIR"

# Download the first release asset (the dist zip); -f so HTTP errors abort.
DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
curl -fL -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"

unzip -o dist.zip
rm dist.zip

# Upgrade the Spark Runtime tools
sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
sudo mv ./spark-sdk-dist/hydrate.sh /usr/local/bin/hydrate.sh
sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
# proxy.js is installed both into the workspace and system-wide.
sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js

# Upgrade the Spark Tools package (skipped when the version marker matches).
if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
  echo "Already at the latest tools version. Skipping extraction."
else
  # The tarball extracts into ./package (npm pack layout).
  tar -xzf ./spark-sdk-dist/spark-tools.tgz

  sudo rm -rf "$WORKSPACE_DIR/packages/spark-tools"
  mkdir -p "$WORKSPACE_DIR/packages/spark-tools"
  sudo mv ./package/* "$WORKSPACE_DIR/packages/spark-tools"
  sudo rmdir ./package

  # Re-link workspace dependencies against the updated package.
  cd "$WORKSPACE_DIR"
  npm i -f
  cd - >/dev/null

  sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
fi

# Upgrade the GH CLI extension
sudo rm -rf /usr/local/bin/gh-spark-cli
sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
cd /usr/local/bin/gh-spark-cli
# The --force option on gh extension install isn't honored for local, so manually remove it first.
# It's not an issue if that fails though as that probably just means it was the first time running this.
gh extension remove spark-cli >/dev/null || true
gh extension install .
gh alias set spark spark-cli --clobber
cd - >/dev/null

# Update marker file with latest release ID (temp dir cleanup is handled by the trap).
echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null

echo "Tools installed successfully."

.devcontainer/spark.conf

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
; Supervisor program definitions for the Spark dev environment.
; Copied into /etc/supervisor/conf.d/ by .devcontainer/postStartCommand.sh.

[supervisord]
; Run the daemon as the container's non-root user.
user=node

; Vite dev server for the template app.
[program:vite]
command=npm run dev
directory=/workspaces/spark-template
autostart=true
autorestart=true
stdout_logfile=/var/log/vite.out.log
stderr_logfile=/var/log/vite.err.log

; Proxy process (binary installed to /usr/local/bin by refreshTools.sh).
[program:proxy]
command=/usr/local/bin/proxy.js
autostart=true
autorestart=true
stdout_logfile=/var/log/proxy.out.log
stderr_logfile=/var/log/proxy.err.log

; Spark server; also writes its own log via LOG_FILE.
[program:spark-server]
command=/usr/local/bin/spark-server
autostart=true
autorestart=true
stdout_logfile=/var/log/spark-server.out.log
stderr_logfile=/var/log/spark-server.err.log
environment=LOG_FILE=/var/log/spark-server.log

; Spark designer service.
[program:spark-designer]
command=/usr/local/bin/spark-designer
autostart=true
autorestart=true
stdout_logfile=/var/log/spark-designer.out.log
stderr_logfile=/var/log/spark-designer.err.log

; File syncer, listening on port 13000 (forwarded in devcontainer.json).
[program:spark-file-syncer]
command=/usr/local/bin/spark-file-syncer 13000
directory=/workspaces/spark-template
autostart=true
autorestart=true
stdout_logfile=/var/log/spark-file-syncer.out.log
stderr_logfile=/var/log/spark-file-syncer.err.log

.github/dependabot.yml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
# Dependabot configuration: automated dependency-update pull requests.
version: 2
updates:
  # npm packages in the repository root, checked daily.
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

  # Dev container image and features, checked weekly.
  - package-ecosystem: "devcontainers"
    directory: "/"
    schedule:
      interval: "weekly"

.github/workflows/publish.yml

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
# Builds the Spark SDK distribution and attaches it to the GitHub release
# that triggered this workflow.
name: Publish Release

on:
  release:
    types: [created]

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '22.x'
          registry-url: 'https://registry.npmjs.org'

      # NOTE(review): checking out github/workbench-template with `path: .`
      # places it over the workspace root checked out above — confirm this
      # overwrite is intentional.
      - name: Checkout github/workbench-template
        uses: actions/checkout@v3
        with:
          repository: github/workbench-template
          path: .
          token: ${{ secrets.RELEASE_GITHUB_PAT }}

      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.21'

      # build-release.sh produces ./spark-sdk-dist.zip (uploaded below).
      - name: Run build script
        env:
          RELEASE_GITHUB_PAT: ${{ secrets.RELEASE_GITHUB_PAT }}
        run: |
          chmod +x ./build/build-release.sh
          ./build/build-release.sh

      - name: Create Release and Upload Assets
        uses: softprops/action-gh-release@v1
        with:
          files: ./spark-sdk-dist.zip
        env:
          GITHUB_TOKEN: ${{ secrets.RELEASE_GITHUB_PAT }}

.gitignore

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

# Dependencies and build output
node_modules
dist
dist-ssr
*-dist
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

# Local environment, reports, and generated workspace artifacts
.env
**/agent-eval-report*
packages
.file-manifest

LICENSE

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
MIT License
2+
3+
Copyright GitHub, Inc.
4+
5+
Permission is hereby granted, free of charge, to any person obtaining a copy
6+
of this software and associated documentation files (the "Software"), to deal
7+
in the Software without restriction, including without limitation the rights
8+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9+
copies of the Software, and to permit persons to whom the Software is
10+
furnished to do so, subject to the following conditions:
11+
12+
The above copyright notice and this permission notice shall be included in all
13+
copies or substantial portions of the Software.
14+
15+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21+
SOFTWARE.

README.md

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
# ✨ Welcome to Your Spark Template!
2+
You've just launched your brand-new Spark Template Codespace — everything’s fired up and ready for you to explore, build, and create with Spark!
3+
4+
This template is your blank canvas. It comes with a minimal setup to help you get started quickly with Spark development.
5+
6+
🚀 What's Inside?
7+
- A clean, minimal Spark environment
8+
- Pre-configured for local development
9+
- Ready to scale with your ideas
10+
11+
🧠 What Can You Do?
12+
13+
Right now, this is just a starting point — the perfect place to begin building and testing your Spark applications.
14+
15+
🧹 Just Exploring?
16+
No problem! If you were just checking things out and don’t need to keep this code:
17+
18+
- Simply delete your Spark.
19+
- Everything will be cleaned up — no traces left behind.
20+
21+
📄 License For Spark Template Resources
22+
23+
The Spark Template files and resources from GitHub are licensed under the terms of the MIT license, Copyright GitHub, Inc.

0 commit comments

Comments
 (0)