diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..333d6134f2 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.git +.github +.opencode +node_modules +**/node_modules +tmp +dist +**/dist +.env +.env.* diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000000..43b65d6d21 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,65 @@ +name: Bug report +description: Report a problem in OpenWork +title: "[Bug]: " +labels: + - bug +body: + - type: textarea + id: summary + attributes: + label: Summary + description: What's not working / wrong? + validations: + required: true + - type: textarea + id: steps + attributes: + label: To Reproduce + description: "Minimal Steps to reproduce the issue:" + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected behavior + description: A short description of what you expected to happen. + validations: + required: true + - type: textarea + id: actual + attributes: + label: Actual behavior + description: A short description of what actually happened. + validations: + required: true + - type: textarea + id: screenshots + attributes: + label: Screenshots (optional) + description: If applicable, add screenshots or a video to help explain your problem. + validations: + required: false + - type: textarea + id: desktop_info + attributes: + label: OW version & Desktop info (optional) + description: | + Include OS and OpenWork version if possible. + OpenWork version: Settings > General. + placeholder: | + - OpenWork version: [e.g. 0.1.166] + - OS: [e.g. macOS Tahoe 26.2] + validations: + required: false + - type: textarea + id: additional_context + attributes: + label: Additional context (optional) + description: Add any other context about the problem here. 
+ validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 0000000000..fbbc6fce53 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,68 @@ +name: Feature request +description: Suggest an improvement or new capability +title: "[Feature]: " +labels: + - feature +body: + - type: textarea + id: summary + attributes: + label: Summary + description: Short description of the request. + placeholder: Add ... + validations: + required: true + - type: textarea + id: problem + attributes: + label: Problem / goal + description: What user outcome are you trying to achieve? + validations: + required: true + - type: checkboxes + id: users + attributes: + label: Primary user(s) + options: + - label: Bob (IT / power user) + - label: Susan (non-technical) + - label: Other team roles + - type: textarea + id: opencode_alignment + attributes: + label: OpenCode primitive alignment + description: Is there an existing OpenCode primitive or API that covers this? If not, why is a thin OpenWork layer still needed? + placeholder: session.*, permission.*, skills/plugins, mcp... + validations: + required: true + - type: textarea + id: doc_alignment + attributes: + label: Alignment with VISION/PRINCIPLES/PRODUCT + description: How does this align with `VISION.md`, `PRINCIPLES.md`, and `PRODUCT.md`? + validations: + required: true + - type: textarea + id: testability + attributes: + label: Testability + description: How can we test this? (manual steps, tooling, screenshots) + placeholder: pnpm dev + chrome mcp + screenshots + validations: + required: true + - type: dropdown + id: ready_to_build + attributes: + label: Ready to build it yourself? + options: + - "Yes" + - "No" + validations: + required: true + - type: textarea + id: additional + attributes: + label: Additional context + description: Links, mockups, or related issues. 
+ validations: + required: false diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..fbf625b38a --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,41 @@ +## Summary +- + +## Why +- + +## Issue +- Closes # + +## Scope +- + +## Out of scope +- + +## Testing +### Ran +- `...` + +### Result +- pass/fail: +- if fail, exact files/errors: + +## CI status +- pass: +- code-related failures: +- external/env/auth blockers: + +## Manual verification +1. +2. +3. + +## Evidence +- video/screenshot link, or `N/A (docs-only)` + +## Risk +- + +## Rollback +- diff --git a/.github/workflows/alpha-macos-aarch64.yml b/.github/workflows/alpha-macos-aarch64.yml new file mode 100644 index 0000000000..5405eff284 --- /dev/null +++ b/.github/workflows/alpha-macos-aarch64.yml @@ -0,0 +1,247 @@ +name: Alpha Channel (macOS arm64) + +# Every merge to `dev` publishes a fresh macOS arm64 build to the OpenWork +# alpha release channel. +# +# The alpha channel is macOS-only today. It lives as a rolling GitHub +# release. Each run also updates a small manifest on the fixed +# `alpha-macos-latest` release so the Electron updater feed stays stable while +# historical alpha artifacts remain available. +# +# See: +# - ARCHITECTURE.md#release-channels +# - .github/workflows/release-macos-aarch64.yml (stable channel) + +on: + push: + branches: + - dev + workflow_dispatch: + +permissions: + contents: write + +concurrency: + group: alpha-macos-aarch64-${{ github.ref }} + cancel-in-progress: true + +jobs: + publish-alpha-macos-aarch64: + name: Build + Publish alpha (aarch64-apple-darwin) + runs-on: macos-14 + timeout-minutes: 180 + + env: + ALPHA_RELEASE_TAG: alpha-macos-latest + ALPHA_RELEASE_NAME: OpenWork Alpha (macOS arm64) + # Apple signing + notarization are required so alpha bundles install + # and launch without Gatekeeper friction. Alpha builds are served + # from GitHub Releases like stable, just from a different tag. 
+ MACOS_NOTARIZE: ${{ vars.MACOS_NOTARIZE || 'true' }} + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ github.sha }} + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: "1.3.6" + + - name: Get pnpm store path + id: pnpm-store + shell: bash + run: echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" + + - name: Cache pnpm store + uses: actions/cache@v5 + continue-on-error: true + with: + path: ${{ steps.pnpm-store.outputs.path }} + key: macos-pnpm-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + macos-pnpm- + + - name: Install dependencies + run: pnpm install --frozen-lockfile --prefer-offline + + - name: Resolve alpha version + id: alpha-version + shell: bash + env: + GITHUB_RUN_NUMBER: ${{ github.run_number }} + GITHUB_SHA: ${{ github.sha }} + run: | + set -euo pipefail + node <<'NODE' >> "$GITHUB_OUTPUT" + const fs = require("node:fs"); + const path = "apps/desktop/package.json"; + const raw = JSON.parse(fs.readFileSync(path, "utf8")); + const current = String(raw.version || "").trim(); + const match = current.match(/^(\d+)\.(\d+)\.(\d+)(?:-.+)?$/); + if (!match) { + throw new Error(`Unsupported version in ${path}: ${current}`); + } + const [, major, minor, patch] = match; + // Alpha builds advertise the *next* patch version so semver + // comparison makes the alpha newer than the current stable + // (e.g. stable 0.11.207 < alpha 0.11.208-alpha.). Once + // stable 0.11.208 ships, its semver beats the alpha prerelease + // tag and alpha users cleanly migrate forward. 
+ const nextPatch = Number(patch) + 1; + const run = process.env.GITHUB_RUN_NUMBER || "0"; + const sha = (process.env.GITHUB_SHA || "").slice(0, 7) || "local"; + const alpha = `${major}.${minor}.${nextPatch}-alpha.${run}+${sha}`; + const releaseTag = `alpha-macos-v${major}.${minor}.${nextPatch}-alpha.${run}-${sha}`; + console.log(`alpha_version=${alpha}`); + console.log(`base_version=${major}.${minor}.${nextPatch}`); + console.log(`release_tag=${releaseTag}`); + NODE + + - name: Write alpha Electron package version + shell: bash + env: + ALPHA_VERSION: ${{ steps.alpha-version.outputs.alpha_version }} + run: | + set -euo pipefail + node <<'NODE' + const fs = require("node:fs"); + for (const path of ["apps/desktop/package.json", "apps/app/package.json"]) { + const json = JSON.parse(fs.readFileSync(path, "utf8")); + json.version = process.env.ALPHA_VERSION; + fs.writeFileSync(path, `${JSON.stringify(json, null, 2)}\n`); + } + NODE + + - name: Write notary API key + if: env.MACOS_NOTARIZE == 'true' + env: + APPLE_NOTARY_API_KEY_P8_BASE64: ${{ secrets.APPLE_NOTARY_API_KEY_P8_BASE64 }} + run: | + set -euo pipefail + + NOTARY_KEY_PATH="$RUNNER_TEMP/AuthKey.p8" + printf '%s' "$APPLE_NOTARY_API_KEY_P8_BASE64" | base64 --decode > "$NOTARY_KEY_PATH" + chmod 600 "$NOTARY_KEY_PATH" + + echo "NOTARY_KEY_PATH=$NOTARY_KEY_PATH" >> "$GITHUB_ENV" + + - name: Reject unsigned Electron alpha release + if: env.MACOS_NOTARIZE != 'true' + shell: bash + run: | + echo "Electron alpha artifacts must be signed and notarized. Set MACOS_NOTARIZE=true and provide Apple signing secrets." 
>&2 + exit 1 + + - name: Build Electron alpha app + if: env.MACOS_NOTARIZE == 'true' + shell: bash + env: + TARGET: aarch64-apple-darwin + run: pnpm --filter @openwork/desktop build:electron + + - name: Package Electron alpha (macOS, signed + notarized) + if: env.MACOS_NOTARIZE == 'true' + env: + CSC_LINK: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + MACOS_NOTARIZE: true + APPLE_API_KEY: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_API_ISSUER: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_API_KEY_PATH: ${{ env.NOTARY_KEY_PATH }} + run: | + set -euo pipefail + pnpm --dir apps/desktop exec electron-builder \ + --config electron-builder.yml \ + --mac \ + --arm64 \ + --publish never + + - name: Create immutable alpha prerelease + if: env.MACOS_NOTARIZE == 'true' + shell: bash + env: + GH_TOKEN: ${{ github.token }} + ALPHA_VERSION: ${{ steps.alpha-version.outputs.alpha_version }} + ALPHA_RUN_RELEASE_TAG: ${{ steps.alpha-version.outputs.release_tag }} + run: | + set -euo pipefail + body="Rolling alpha build for OpenWork (macOS arm64) from ${GITHUB_SHA}." + if gh release view "$ALPHA_RUN_RELEASE_TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + echo "Alpha prerelease $ALPHA_RUN_RELEASE_TAG already exists; reusing it." 
+ exit 0 + fi + gh release create "$ALPHA_RUN_RELEASE_TAG" \ + --repo "$GITHUB_REPOSITORY" \ + --title "OpenWork Alpha ${ALPHA_VERSION}" \ + --notes "$body" \ + --target "$GITHUB_SHA" \ + --prerelease + + - name: Upload Electron alpha updater assets + if: env.MACOS_NOTARIZE == 'true' + env: + GH_TOKEN: ${{ github.token }} + ALPHA_RUN_RELEASE_TAG: ${{ steps.alpha-version.outputs.release_tag }} + run: | + set -euo pipefail + shopt -s nullglob + assets=( + apps/desktop/dist-electron/*.dmg + apps/desktop/dist-electron/*.zip + apps/desktop/dist-electron/*.blockmap + apps/desktop/dist-electron/latest-mac.yml + ) + if [ ${#assets[@]} -eq 0 ]; then + echo "No Electron alpha assets found in apps/desktop/dist-electron" >&2 + exit 1 + fi + gh release upload "$ALPHA_RUN_RELEASE_TAG" "${assets[@]}" \ + --repo "$GITHUB_REPOSITORY" \ + --clobber + + - name: Update alpha updater pointer + if: env.MACOS_NOTARIZE == 'true' + shell: bash + env: + GH_TOKEN: ${{ github.token }} + ALPHA_RUN_RELEASE_TAG: ${{ steps.alpha-version.outputs.release_tag }} + run: | + set -euo pipefail + POINTER_MANIFEST="$RUNNER_TEMP/latest-mac.yml" + export POINTER_MANIFEST + node <<'NODE' + const fs = require("node:fs"); + const manifestPath = "apps/desktop/dist-electron/latest-mac.yml"; + const releaseTag = process.env.ALPHA_RUN_RELEASE_TAG; + const baseUrl = `https://github.com/different-ai/openwork/releases/download/${releaseTag}`; + const rewritten = fs.readFileSync(manifestPath, "utf8") + .replace(/(url:\s*)(openwork-[^\n]+)/g, `$1${baseUrl}/$2`) + .replace(/(path:\s*)(openwork-[^\n]+)/g, `$1${baseUrl}/$2`); + fs.writeFileSync(process.env.POINTER_MANIFEST, rewritten); + NODE + + if ! gh release view "$ALPHA_RELEASE_TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + gh release create "$ALPHA_RELEASE_TAG" \ + --repo "$GITHUB_REPOSITORY" \ + --title "$ALPHA_RELEASE_NAME" \ + --notes "Stable Electron updater pointer for the newest macOS arm64 alpha prerelease." 
\ + --target "$GITHUB_SHA" \ + --prerelease + fi + + gh release upload "$ALPHA_RELEASE_TAG" "$POINTER_MANIFEST#latest-mac.yml" \ + --repo "$GITHUB_REPOSITORY" \ + --clobber diff --git a/.github/workflows/aur-validate.yml b/.github/workflows/aur-validate.yml new file mode 100644 index 0000000000..d61c8e1158 --- /dev/null +++ b/.github/workflows/aur-validate.yml @@ -0,0 +1,457 @@ +name: AUR Validate + +on: + workflow_dispatch: + inputs: + ref: + description: "Git ref to validate (branch, SHA, or tag)" + required: false + type: string + default: dev + version: + description: "Package version override (e.g., 0.11.160)" + required: false + type: string + arch: + description: "Target architecture" + required: false + type: choice + options: + - x86_64 + default: x86_64 + mode: + description: "Validation mode" + required: false + type: choice + options: + - smoke + - publish-ready + default: smoke + artifact_source: + description: "Where to source desktop .deb" + required: false + type: choice + options: + - local-build-artifact + - release + default: local-build-artifact + release_tag: + description: "Release tag when artifact_source=release (e.g., v0.11.160)" + required: false + type: string + artifact_run_id: + description: "Workflow run ID to download artifact from when artifact_source=local-build-artifact" + required: false + type: string + artifact_name: + description: "Artifact name in artifact_run_id" + required: false + type: string + default: openwork-desktop-linux-amd64-deb + asset_url_x86_64: + description: "Optional explicit public URL for x86_64 .deb" + required: false + type: string + push_to_aur: + description: "Push validated PKGBUILD/.SRCINFO to AUR" + required: false + type: boolean + default: false + aur_repo: + description: "AUR repo name" + required: false + type: string + default: openwork + + workflow_call: + inputs: + ref: + required: false + type: string + default: dev + version: + required: false + type: string + arch: + required: false + type: 
string + default: x86_64 + mode: + required: false + type: string + default: smoke + artifact_source: + required: false + type: string + default: local-build-artifact + release_tag: + required: false + type: string + artifact_run_id: + required: false + type: string + artifact_name: + required: false + type: string + default: openwork-desktop-linux-amd64-deb + asset_url_x86_64: + required: false + type: string + push_to_aur: + required: false + type: boolean + default: false + aur_repo: + required: false + type: string + default: openwork + secrets: + AUR_SSH_PRIVATE_KEY: + required: false + +permissions: + contents: read + actions: read + +concurrency: + group: aur-validate-${{ github.workflow }}-${{ inputs.ref || github.ref_name }} + cancel-in-progress: true + +jobs: + aur-validate: + name: Validate AUR package (${{ inputs.arch }}, ${{ inputs.mode }}) + runs-on: ubuntu-22.04 + container: + image: archlinux:latest + env: + TARGET_ARCH: ${{ inputs.arch || 'x86_64' }} + MODE: ${{ inputs.mode || 'smoke' }} + ARTIFACT_SOURCE: ${{ inputs.artifact_source || 'local-build-artifact' }} + INPUT_VERSION: ${{ inputs.version }} + INPUT_RELEASE_TAG: ${{ inputs.release_tag }} + INPUT_ASSET_URL_X86_64: ${{ inputs.asset_url_x86_64 }} + INPUT_ARTIFACT_RUN_ID: ${{ inputs.artifact_run_id }} + INPUT_ARTIFACT_NAME: ${{ inputs.artifact_name || 'openwork-desktop-linux-amd64-deb' }} + PUSH_TO_AUR: ${{ inputs.push_to_aur && 'true' || 'false' }} + AUR_REPO: ${{ inputs.aur_repo || 'openwork' }} + steps: + - name: Validate workflow inputs + shell: bash + run: | + set -euo pipefail + + if [ "${TARGET_ARCH}" != "x86_64" ]; then + echo "Only x86_64 is currently supported." 
>&2 + exit 1 + fi + + if [ "${MODE}" != "smoke" ] && [ "${MODE}" != "publish-ready" ]; then + echo "mode must be smoke or publish-ready" >&2 + exit 1 + fi + + if [ "${ARTIFACT_SOURCE}" != "local-build-artifact" ] && [ "${ARTIFACT_SOURCE}" != "release" ]; then + echo "artifact_source must be local-build-artifact or release" >&2 + exit 1 + fi + + if [ "${ARTIFACT_SOURCE}" = "local-build-artifact" ] && [ -z "${INPUT_ASSET_URL_X86_64:-}" ] && [ -z "${INPUT_ARTIFACT_RUN_ID:-}" ]; then + echo "artifact_run_id is required when artifact_source=local-build-artifact and asset_url_x86_64 is empty" >&2 + exit 1 + fi + + if [ "${PUSH_TO_AUR}" = "true" ] && [ "${ARTIFACT_SOURCE}" = "local-build-artifact" ] && [ -z "${INPUT_ASSET_URL_X86_64:-}" ]; then + echo "For push_to_aur with local-build-artifact, set asset_url_x86_64 to a public URL (or use artifact_source=release)." >&2 + exit 1 + fi + + - name: Install Arch packaging dependencies + shell: bash + run: | + set -euo pipefail + pacman -Syu --noconfirm --needed \ + base-devel \ + curl \ + git \ + jq \ + namcap \ + openssh \ + python \ + sudo \ + xorg-server-xvfb + + - name: Checkout target ref + uses: actions/checkout@v6 + with: + ref: ${{ inputs.ref || github.ref_name }} + fetch-depth: 0 + + - name: Create non-root makepkg user + shell: bash + run: | + set -euo pipefail + useradd -m -s /bin/bash builder + echo "builder ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/builder + chmod 0440 /etc/sudoers.d/builder + chown -R builder:builder "$GITHUB_WORKSPACE" + + - name: Download release asset (.deb) + if: env.ARTIFACT_SOURCE == 'release' + shell: bash + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + + release_tag="${INPUT_RELEASE_TAG:-}" + if [ -z "$release_tag" ] && [ -n "${INPUT_VERSION:-}" ]; then + release_tag="v${INPUT_VERSION}" + fi + + if [ -z "$release_tag" ]; then + echo "release_tag or version is required when artifact_source=release" >&2 + exit 1 + fi + + case "$release_tag" in + v*) ;; + *) 
release_tag="v${release_tag}" ;; + esac + + asset_url="${INPUT_ASSET_URL_X86_64:-}" + if [ -z "$asset_url" ]; then + asset_url="https://github.com/${GITHUB_REPOSITORY}/releases/download/${release_tag}/openwork-desktop-linux-amd64.deb" + fi + + mkdir -p /tmp/aur-artifacts + curl -fL --retry 5 --retry-all-errors --retry-delay 2 "$asset_url" -o /tmp/aur-artifacts/openwork-desktop-linux-amd64.deb + + echo "RESOLVED_RELEASE_TAG=${release_tag}" >> "$GITHUB_ENV" + echo "RESOLVED_ASSET_URL_X86_64=${asset_url}" >> "$GITHUB_ENV" + + - name: Download local workflow artifact + if: env.ARTIFACT_SOURCE == 'local-build-artifact' && env.INPUT_ASSET_URL_X86_64 == '' + uses: actions/download-artifact@v8 + with: + name: ${{ inputs.artifact_name || 'openwork-desktop-linux-amd64-deb' }} + run-id: ${{ inputs.artifact_run_id }} + github-token: ${{ github.token }} + path: /tmp/aur-artifacts + + - name: Download x86_64 asset URL override + if: env.ARTIFACT_SOURCE == 'local-build-artifact' && env.INPUT_ASSET_URL_X86_64 != '' + shell: bash + run: | + set -euo pipefail + mkdir -p /tmp/aur-artifacts + curl -fL --retry 5 --retry-all-errors --retry-delay 2 "${INPUT_ASSET_URL_X86_64}" -o /tmp/aur-artifacts/openwork-desktop-linux-amd64.deb + + - name: Resolve local artifact path + if: env.ARTIFACT_SOURCE == 'local-build-artifact' + shell: bash + run: | + set -euo pipefail + deb_path=$(find /tmp/aur-artifacts -maxdepth 3 -type f -name '*.deb' | head -n 1) + if [ -z "${deb_path:-}" ]; then + echo "No .deb file found in /tmp/aur-artifacts" >&2 + exit 1 + fi + + if [ "$deb_path" != "/tmp/aur-artifacts/openwork-desktop-linux-amd64.deb" ]; then + cp "$deb_path" /tmp/aur-artifacts/openwork-desktop-linux-amd64.deb + fi + + - name: Resolve version, URL, and checksum + id: resolve + shell: bash + run: | + set -euo pipefail + + deb_path="/tmp/aur-artifacts/openwork-desktop-linux-amd64.deb" + if [ ! 
-f "$deb_path" ]; then + echo "Expected $deb_path to exist" >&2 + exit 1 + fi + + version="${INPUT_VERSION:-}" + if [ -z "$version" ] && [ -n "${RESOLVED_RELEASE_TAG:-}" ]; then + version="${RESOLVED_RELEASE_TAG#v}" + fi + if [ -z "$version" ]; then + version="$(awk -F= '$1 == "pkgver" {print $2; exit}' packaging/aur/PKGBUILD)" + fi + + if [ -z "$version" ]; then + echo "Unable to determine version. Pass inputs.version." >&2 + exit 1 + fi + + sha256="$(sha256sum "$deb_path" | awk '{print $1}')" + + source_url="${RESOLVED_ASSET_URL_X86_64:-}" + if [ -z "$source_url" ] && [ -n "${INPUT_ASSET_URL_X86_64:-}" ]; then + source_url="${INPUT_ASSET_URL_X86_64}" + fi + if [ -z "$source_url" ]; then + source_url="https://github.com/${GITHUB_REPOSITORY}/releases/download/v${version}/openwork-desktop-linux-amd64.deb" + fi + + echo "version=${version}" >> "$GITHUB_OUTPUT" + echo "sha256=${sha256}" >> "$GITHUB_OUTPUT" + echo "source_url=${source_url}" >> "$GITHUB_OUTPUT" + + - name: Patch PKGBUILD for x86_64 validation + shell: bash + env: + RESOLVED_VERSION: ${{ steps.resolve.outputs.version }} + RESOLVED_SHA256: ${{ steps.resolve.outputs.sha256 }} + RESOLVED_SOURCE_URL: ${{ steps.resolve.outputs.source_url }} + run: | + set -euo pipefail + python3 - <<'PY' + import pathlib + import re + import os + + path = pathlib.Path("packaging/aur/PKGBUILD") + text = path.read_text() + + version = os.environ["RESOLVED_VERSION"] + sha256 = os.environ["RESOLVED_SHA256"] + source_url = os.environ["RESOLVED_SOURCE_URL"] + + text = re.sub(r"^pkgver=.*$", f"pkgver={version}", text, flags=re.M) + text = re.sub(r"^pkgrel=.*$", "pkgrel=1", text, flags=re.M) + text = re.sub(r"^arch=.*$", "arch=('x86_64')", text, flags=re.M) + text = re.sub( + r"^source_x86_64=.*$", + "source_x86_64=(\"${pkgname}-${pkgver}.deb::" + source_url + "\")", + text, + flags=re.M, + ) + text = re.sub( + r"^sha256sums_x86_64=.*$", + f"sha256sums_x86_64=('{sha256}')", + text, + flags=re.M, + ) + text = 
re.sub(r"^source_aarch64=.*\n?", "", text, flags=re.M) + text = re.sub(r"^sha256sums_aarch64=.*\n?", "", text, flags=re.M) + + path.write_text(text) + PY + + - name: Regenerate .SRCINFO + shell: bash + run: | + set -euo pipefail + workspace_path="$GITHUB_WORKSPACE" + sudo -u builder bash -lc "cd '$workspace_path/packaging/aur' && makepkg --printsrcinfo > .SRCINFO" + + - name: Build package with makepkg + shell: bash + run: | + set -euo pipefail + workspace_path="$GITHUB_WORKSPACE" + sudo -u builder bash -lc "cd '$workspace_path/packaging/aur' && makepkg -f --syncdeps --noconfirm --cleanbuild" + + - name: Run namcap checks + if: env.MODE == 'publish-ready' + shell: bash + run: | + set -euo pipefail + cd packaging/aur + namcap PKGBUILD || true + pkg_file=$(ls -1 openwork-*.pkg.tar.* | head -n 1) + namcap "$pkg_file" || true + + - name: Install built package + shell: bash + run: | + set -euo pipefail + cd packaging/aur + pkg_file=$(ls -1 openwork-*.pkg.tar.* | head -n 1) + pacman -U --noconfirm "$pkg_file" + + - name: Smoke launch + shell: bash + run: | + set -euo pipefail + pacman -Ql openwork > /tmp/openwork-installed-files.txt + + if [ ! -s /tmp/openwork-installed-files.txt ]; then + echo "openwork package install listing is empty" >&2 + exit 1 + fi + + launch_bin="" + for candidate in \ + "$(command -v openwork 2>/dev/null || true)" \ + /usr/bin/openwork \ + /opt/openwork/openwork \ + /opt/openwork/openwork-desktop \ + /opt/openwork/opencode + do + if [ -n "$candidate" ] && [ -x "$candidate" ]; then + launch_bin="$candidate" + break + fi + done + + if [ -n "$launch_bin" ]; then + xvfb-run -a bash -lc '"$1" --help >/tmp/openwork-help.txt 2>&1 || "$1" --version >/tmp/openwork-version.txt 2>&1 || true' -- "$launch_bin" + else + echo "No runnable desktop binary found; install sanity check passed." 
+ fi + + - name: Publish to AUR + if: env.PUSH_TO_AUR == 'true' + env: + AUR_SSH_PRIVATE_KEY: ${{ secrets.AUR_SSH_PRIVATE_KEY }} + RELEASE_TAG: v${{ steps.resolve.outputs.version }} + AUR_SKIP_UPDATE: "1" + shell: bash + run: | + set -euo pipefail + + if [ -z "${AUR_SSH_PRIVATE_KEY:-}" ]; then + echo "AUR_SSH_PRIVATE_KEY not set; cannot push to AUR." >&2 + exit 1 + fi + + mkdir -p "$HOME/.ssh" + touch "$HOME/.ssh/known_hosts" + ssh-keygen -R aur.archlinux.org >/dev/null 2>&1 || true + ssh-keyscan -t rsa,ecdsa,ed25519 aur.archlinux.org >> "$HOME/.ssh/known_hosts" 2>/dev/null + + tmp_dir="$(mktemp -d)" + trap 'rm -rf "$tmp_dir"' EXIT + + key_path="$tmp_dir/aur.key" + printf '%s\n' "$AUR_SSH_PRIVATE_KEY" > "$key_path" + chmod 600 "$key_path" + + aur_remote="ssh://aur@aur.archlinux.org/${AUR_REPO}.git" + export GIT_SSH_COMMAND="ssh -i $key_path -o IdentitiesOnly=yes -o StrictHostKeyChecking=accept-new" + + git clone "$aur_remote" "$tmp_dir/aur" + cp packaging/aur/PKGBUILD "$tmp_dir/aur/PKGBUILD" + cp packaging/aur/.SRCINFO "$tmp_dir/aur/.SRCINFO" + + cd "$tmp_dir/aur" + if git diff --quiet -- PKGBUILD .SRCINFO; then + echo "AUR already up to date for ${AUR_REPO}." 
+ exit 0 + fi + + git add PKGBUILD .SRCINFO + git -c user.name="OpenWork Release Bot" \ + -c user.email="release-bot@users.noreply.github.com" \ + commit -m "chore(aur): update PKGBUILD for ${RELEASE_TAG#v}" + + current_branch="$(git symbolic-ref --short HEAD 2>/dev/null || true)" + if [ -z "$current_branch" ]; then + current_branch="master" + fi + + git push origin "HEAD:${current_branch}" diff --git a/.github/workflows/build-electron-desktop.yml b/.github/workflows/build-electron-desktop.yml new file mode 100644 index 0000000000..9f4364baba --- /dev/null +++ b/.github/workflows/build-electron-desktop.yml @@ -0,0 +1,114 @@ +name: Build Electron Desktop + +on: + workflow_dispatch: + push: + branches: + - electron-notary-test + paths: + - apps/app/** + - apps/desktop/** + - packages/ui/** + - constants.json + - pnpm-lock.yaml + - package.json + - .github/workflows/build-electron-desktop.yml + +jobs: + build-electron: + strategy: + fail-fast: false + matrix: + include: + - os: macos-latest + artifact: macos + - os: ubuntu-latest + artifact: linux + - os: windows-latest + artifact: windows + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + # pnpm must be installed BEFORE setup-node so setup-node can find the + # pnpm binary on PATH. Matches the pattern in build-desktop.yml so + # the two workflows share the same bootstrap story. 
+ - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.9 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Package Electron desktop (unpacked) + run: pnpm --filter @openwork/desktop package:electron:dir + + - name: Upload Electron artifacts + uses: actions/upload-artifact@v4 + with: + name: openwork-electron-${{ matrix.artifact }} + path: apps/desktop/dist-electron/** + + electron-macos-notarization-smoke: + name: Electron macOS notarization smoke + runs-on: macos-14 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.9 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Write notary API key + env: + APPLE_NOTARY_API_KEY_P8_BASE64: ${{ secrets.APPLE_NOTARY_API_KEY_P8_BASE64 }} + run: | + set -euo pipefail + + NOTARY_KEY_PATH="$RUNNER_TEMP/AuthKey.p8" + printf '%s' "$APPLE_NOTARY_API_KEY_P8_BASE64" | base64 --decode > "$NOTARY_KEY_PATH" + chmod 600 "$NOTARY_KEY_PATH" + + echo "NOTARY_KEY_PATH=$NOTARY_KEY_PATH" >> "$GITHUB_ENV" + + - name: Package Electron desktop (signed + notarized, no publish) + env: + CSC_LINK: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + MACOS_NOTARIZE: true + APPLE_API_KEY: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_API_ISSUER: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_API_KEY_PATH: ${{ env.NOTARY_KEY_PATH }} + run: | + set -euo pipefail + pnpm --filter @openwork/desktop run build:electron + pnpm --dir apps/desktop exec electron-builder --config electron-builder.yml --mac 
dmg zip --publish never diff --git a/.github/workflows/ci-i18n.yml b/.github/workflows/ci-i18n.yml new file mode 100644 index 0000000000..3a4eb10a41 --- /dev/null +++ b/.github/workflows/ci-i18n.yml @@ -0,0 +1,28 @@ +name: i18n Audit + +on: + pull_request: + branches: + - dev + push: + branches: + - dev + +permissions: + contents: read + +jobs: + i18n-audit: + runs-on: blacksmith-4vcpu-ubuntu-2204 + + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Run i18n audit + run: node scripts/i18n-audit.mjs --ci diff --git a/.github/workflows/ci-openwork-ui-mcp.yml b/.github/workflows/ci-openwork-ui-mcp.yml new file mode 100644 index 0000000000..739bd8bf1b --- /dev/null +++ b/.github/workflows/ci-openwork-ui-mcp.yml @@ -0,0 +1,77 @@ +name: openwork-ui-mcp + +on: + push: + branches: [dev] + paths: + - "packages/openwork-ui-mcp/**" + - ".github/workflows/ci-openwork-ui-mcp.yml" + tags: + - "openwork-ui-mcp-v*" + pull_request: + branches: [dev] + paths: + - "packages/openwork-ui-mcp/**" + - ".github/workflows/ci-openwork-ui-mcp.yml" + +permissions: + contents: read + +defaults: + run: + working-directory: packages/openwork-ui-mcp + +jobs: + check: + name: Syntax & dry-run publish + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version-file: .nvmrc + registry-url: https://registry.npmjs.org + + - name: Install dependencies + run: npm install --ignore-scripts + + - name: Syntax check + run: node --check index.mjs + + - name: Dry-run publish + run: npm publish --dry-run --access public + + publish: + name: Publish to npm + needs: check + if: startsWith(github.ref, 'refs/tags/openwork-ui-mcp-v') + runs-on: ubuntu-latest + + permissions: + contents: read + id-token: write + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: 
actions/setup-node@v4 + with: + node-version-file: .nvmrc + registry-url: https://registry.npmjs.org + + - name: Install dependencies + run: npm install --ignore-scripts + + - name: Syntax check + run: node --check index.mjs + + - name: Publish + run: npm publish --access public --provenance + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml new file mode 100644 index 0000000000..e7d4ae15df --- /dev/null +++ b/.github/workflows/ci-tests.yml @@ -0,0 +1,118 @@ +name: OpenWork Tests + +on: + pull_request: + branches: + - dev + push: + branches: + - dev + +permissions: + contents: read + +jobs: + openwork-tests: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: [blacksmith-4vcpu-ubuntu-2204, macos-14] + + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Install OpenCode CLI + shell: bash + env: + GITHUB_TOKEN: ${{ github.token }} + OPENCODE_GITHUB_REPO: ${{ vars.OPENCODE_GITHUB_REPO || 'anomalyco/opencode' }} + run: | + set -euo pipefail + + repo="${OPENCODE_GITHUB_REPO:-anomalyco/opencode}" + version="$(node -e "const fs=require('fs'); const parsed=JSON.parse(fs.readFileSync('constants.json','utf8')); process.stdout.write(String(parsed.opencodeVersion||'').trim().replace(/^v/,''));")" + version="$(echo "$version" | tr -d '\r\n' | sed 's/^v//')" + + if [ -z "$version" ]; then + echo "Unable to resolve OpenCode version from constants.json." 
>&2 + exit 1 + fi + + arch="$(uname -m)" + case "${RUNNER_OS}" in + Linux) + if [ "$arch" = "aarch64" ] || [ "$arch" = "arm64" ]; then + opencode_asset="opencode-linux-arm64.tar.gz" + else + opencode_asset="opencode-linux-x64-baseline.tar.gz" + fi + ;; + macOS) + if [ "$arch" = "arm64" ]; then + opencode_asset="opencode-darwin-arm64.zip" + else + opencode_asset="opencode-darwin-x64-baseline.zip" + fi + ;; + *) + echo "Unsupported OS: ${RUNNER_OS}" >&2 + exit 1 + ;; + esac + + url="https://github.com/${repo}/releases/download/v${version}/${opencode_asset}" + tmp_dir="$RUNNER_TEMP/opencode" + extract_dir="$tmp_dir/extracted" + rm -rf "$tmp_dir" + mkdir -p "$extract_dir" + + curl_headers=() + if [ -n "${GITHUB_TOKEN:-}" ]; then + curl_headers+=( -H "Authorization: Bearer ${GITHUB_TOKEN}" ) + fi + + curl -fsSL --retry 5 --retry-all-errors --retry-delay 2 "${curl_headers[@]}" -o "$tmp_dir/$opencode_asset" "$url" + + if [[ "$opencode_asset" == *.tar.gz ]]; then + tar -xzf "$tmp_dir/$opencode_asset" -C "$extract_dir" + else + unzip -q "$tmp_dir/$opencode_asset" -d "$extract_dir" + fi + + if [ -f "$extract_dir/opencode" ]; then + bin_path="$extract_dir/opencode" + elif [ -f "$extract_dir/opencode.exe" ]; then + bin_path="$extract_dir/opencode.exe" + else + echo "OpenCode binary not found in archive" >&2 + ls -la "$extract_dir" + exit 1 + fi + + install_dir="$HOME/.opencode/bin" + mkdir -p "$install_dir" + cp "$bin_path" "$install_dir/opencode" + chmod 755 "$install_dir/opencode" + echo "$install_dir" >> "$GITHUB_PATH" + + - name: Verify OpenCode + run: opencode --version + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Run e2e tests + run: pnpm --filter @openwork/app test:e2e diff --git a/.github/workflows/deploy-den.yml b/.github/workflows/deploy-den.yml new file mode 100644 index 0000000000..4ed04072bd --- /dev/null +++ b/.github/workflows/deploy-den.yml @@ -0,0 +1,99 @@ +name: Deploy Den + +on: + workflow_call: + inputs: + 
daytona_snapshot: + description: "Daytona snapshot name to promote into Render" + required: true + type: string + workflow_dispatch: + inputs: + daytona_snapshot: + description: "Daytona snapshot name to promote into Render" + required: true + type: string + +permissions: + contents: read + +concurrency: + group: deploy-den-${{ inputs.daytona_snapshot }} + cancel-in-progress: false + +jobs: + deploy-den: + name: Update Render Daytona Snapshot + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RENDER_API_BASE: https://api.render.com/v1 + RENDER_API_KEY: ${{ secrets.RENDER_API_KEY }} + RENDER_SERVICE_ID: ${{ secrets.RENDER_DEN_CONTROL_PLANE_SERVICE_ID }} + DAYTONA_SNAPSHOT: ${{ inputs.daytona_snapshot }} + steps: + - name: Validate required configuration + shell: bash + run: | + set -euo pipefail + + if [ -z "${DAYTONA_SNAPSHOT:-}" ]; then + echo "daytona_snapshot input is required" >&2 + exit 1 + fi + + if [ -z "${RENDER_API_KEY:-}" ]; then + echo "Missing required secret: RENDER_API_KEY" >&2 + exit 1 + fi + + if [ -z "${RENDER_SERVICE_ID:-}" ]; then + echo "Missing required secret: RENDER_DEN_CONTROL_PLANE_SERVICE_ID" >&2 + exit 1 + fi + + - name: Update DAYTONA_SNAPSHOT on Render + shell: bash + run: | + set -euo pipefail + + payload="$(python3 -c 'import json, sys; print(json.dumps({"value": sys.argv[1]}))' "$DAYTONA_SNAPSHOT")" + response_file="$(mktemp)" + status_code="$(curl -sS -o "$response_file" -w "%{http_code}" \ + -X PUT "${RENDER_API_BASE}/services/${RENDER_SERVICE_ID}/env-vars/DAYTONA_SNAPSHOT" \ + -H "Accept: application/json" \ + -H "Authorization: Bearer ${RENDER_API_KEY}" \ + -H "Content-Type: application/json" \ + --data "$payload")" + + if [ "$status_code" -lt 200 ] || [ "$status_code" -ge 300 ]; then + echo "Failed to update Render DAYTONA_SNAPSHOT (HTTP $status_code)" >&2 + python3 -c 'from pathlib import Path; import sys; print(Path(sys.argv[1]).read_text(errors="replace"))' "$response_file" + exit 1 + fi + + echo "Render DAYTONA_SNAPSHOT set to 
${DAYTONA_SNAPSHOT}" + + - name: Trigger Render deploy + id: deploy + shell: bash + run: | + set -euo pipefail + + response_file="$(mktemp)" + status_code="$(curl -sS -o "$response_file" -w "%{http_code}" \ + -X POST "${RENDER_API_BASE}/services/${RENDER_SERVICE_ID}/deploys" \ + -H "Accept: application/json" \ + -H "Authorization: Bearer ${RENDER_API_KEY}" \ + -H "Content-Type: application/json" \ + --data '{}')" + + if [ "$status_code" -lt 200 ] || [ "$status_code" -ge 300 ]; then + echo "Failed to trigger Render deploy (HTTP $status_code)" >&2 + python3 -c 'from pathlib import Path; import sys; print(Path(sys.argv[1]).read_text(errors="replace"))' "$response_file" + exit 1 + fi + + deploy_id="$(python3 -c 'import json, sys; from pathlib import Path; text = Path(sys.argv[1]).read_text(errors="replace").strip(); data = json.loads(text) if text else {}; print(data.get("id", "") if isinstance(data, dict) else "")' "$response_file")" + + echo "deploy_id=${deploy_id}" >> "$GITHUB_OUTPUT" + echo "Triggered Render deploy ${deploy_id:-} for snapshot ${DAYTONA_SNAPSHOT}" diff --git a/.github/workflows/download-stats.yml b/.github/workflows/download-stats.yml new file mode 100644 index 0000000000..a965d04b70 --- /dev/null +++ b/.github/workflows/download-stats.yml @@ -0,0 +1,43 @@ +name: Download Stats + +on: + schedule: + - cron: "0 12 * * *" # Run daily at 12:00 UTC + workflow_dispatch: + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +jobs: + stats: + if: github.repository == 'different-ai/openwork' + runs-on: blacksmith-4vcpu-ubuntu-2404 + permissions: + contents: write + + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Run stats script + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }} + POSTHOG_HOST: https://us.i.posthog.com + POSTHOG_LEGACY_EVENT: download + POSTHOG_V2_EVENT: release_asset_snapshot + 
POSTHOG_DISTINCT_ID: openwork-download + GITHUB_REPO: different-ai/openwork + run: node scripts/stats.mjs + + - name: Commit stats + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add STATS.md STATS_V2.md + git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)" + git push diff --git a/.github/workflows/opencode-agents.yml b/.github/workflows/opencode-agents.yml new file mode 100644 index 0000000000..8abafe4299 --- /dev/null +++ b/.github/workflows/opencode-agents.yml @@ -0,0 +1,106 @@ +name: Opencode Agents + +on: + workflow_dispatch: + inputs: + target: + description: Run issue triage or duplicate PR check + required: true + type: choice + options: + - issue + - pull_request + number: + description: Issue or pull request number + required: true + type: number + +jobs: + triage-issue: + if: inputs.target == 'issue' + runs-on: blacksmith-4vcpu-ubuntu-2404 + permissions: + contents: read + issues: write + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 1 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Install opencode + run: | + version="$(node -e "const fs=require('fs'); const parsed=JSON.parse(fs.readFileSync('constants.json','utf8')); process.stdout.write(String(parsed.opencodeVersion||'').trim().replace(/^v/,''));")" + curl -fsSL https://opencode.ai/install | bash -s -- --version "$version" --no-modify-path + + - name: Triage issue + env: + OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBER: ${{ inputs.number }} + run: | + cat > /tmp/issue_prompt.txt <<'PROMPT_EOF' + The following issue was just opened, triage it: + PROMPT_EOF + ISSUE_TITLE="$(gh issue view "$ISSUE_NUMBER" --json title --jq .title)" + ISSUE_BODY="$(gh issue view "$ISSUE_NUMBER" --json body --jq .body)" + printf '\nTitle: %s\n\n%s\n' "$ISSUE_TITLE" 
"$ISSUE_BODY" >> /tmp/issue_prompt.txt + opencode run --agent triage "$(cat /tmp/issue_prompt.txt)" + + duplicate-prs: + if: inputs.target == 'pull_request' + runs-on: blacksmith-4vcpu-ubuntu-2404 + permissions: + contents: read + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 1 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Install opencode + run: | + version="$(node -e "const fs=require('fs'); const parsed=JSON.parse(fs.readFileSync('constants.json','utf8')); process.stdout.write(String(parsed.opencodeVersion||'').trim().replace(/^v/,''));")" + curl -fsSL https://opencode.ai/install | bash -s -- --version "$version" --no-modify-path + + - name: Build prompt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ inputs.number }} + run: | + { + echo "Check for duplicate PRs related to this new PR:" + echo "" + echo "CURRENT_PR_NUMBER: $PR_NUMBER" + echo "" + echo "Title: $(gh pr view "$PR_NUMBER" --json title --jq .title)" + echo "" + echo "Description:" + gh pr view "$PR_NUMBER" --json body --jq .body + } > pr_info.txt + + - name: Check for duplicate PRs + env: + OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ inputs.number }} + run: | + opencode run --agent duplicate-pr "$(cat pr_info.txt)" > /tmp/comment_output.txt + + { + echo "_The following comment was made by an LLM, it may be inaccurate:_" + echo "" + cat /tmp/comment_output.txt + } > /tmp/comment_body.txt + gh pr comment "$PR_NUMBER" --body-file /tmp/comment_body.txt diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml new file mode 100644 index 0000000000..36de01c42e --- /dev/null +++ b/.github/workflows/prerelease.yml @@ -0,0 +1,327 @@ +name: PreRelease App + +on: + push: + branches: + - dev + - feat/windows-sidecar + +permissions: + contents: write + +concurrency: + group: 
prerelease-${{ github.ref }} + cancel-in-progress: true + +jobs: + prepare-release: + name: Prepare Prerelease + runs-on: blacksmith-4vcpu-ubuntu-2404 + + outputs: + release_tag: ${{ steps.prerelease-meta.outputs.release_tag }} + release_name: ${{ steps.prerelease-meta.outputs.release_name }} + release_body: ${{ steps.prerelease-meta.outputs.release_body }} + + steps: + - name: Set prerelease metadata + id: prerelease-meta + shell: bash + run: | + set -euo pipefail + + short_sha=$(echo "$GITHUB_SHA" | cut -c1-7) + tag="v${short_sha}-dev" + name="OpenWork ${tag}" + body="Automated prerelease from ${GITHUB_REF_NAME} (${GITHUB_SHA})." + + echo "RELEASE_TAG=$tag" >> "$GITHUB_ENV" + echo "RELEASE_NAME=$name" >> "$GITHUB_ENV" + { + echo "RELEASE_BODY<<__OPENWORK_RELEASE_BODY_EOF__" + echo "$body" + echo "__OPENWORK_RELEASE_BODY_EOF__" + } >> "$GITHUB_ENV" + + echo "release_tag=$tag" >> "$GITHUB_OUTPUT" + echo "release_name=$name" >> "$GITHUB_OUTPUT" + { + echo "release_body<<__OPENWORK_RELEASE_BODY_EOF__" + echo "$body" + echo "__OPENWORK_RELEASE_BODY_EOF__" + } >> "$GITHUB_OUTPUT" + + - name: Create prerelease + shell: bash + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + + BODY_FILE="$RUNNER_TEMP/release_body.md" + printf '%s\n' "$RELEASE_BODY" > "$BODY_FILE" + + if gh release view "$RELEASE_TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + echo "Prerelease $RELEASE_TAG already exists; skipping create." + exit 0 + fi + + gh release create "$RELEASE_TAG" \ + --repo "$GITHUB_REPOSITORY" \ + --title "$RELEASE_NAME" \ + --notes-file "$BODY_FILE" \ + --prerelease + + publish-tauri: + name: Build + Publish (${{ matrix.target }}) + needs: prepare-release + # Set OPENWORK_LINUX_X64_RUNNER_LABEL to route only the Linux x86_64 build to a larger runner. 
+ runs-on: ${{ matrix.target == 'x86_64-unknown-linux-gnu' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL != '' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL || matrix.platform }} + timeout-minutes: 360 + + env: + RELEASE_TAG: ${{ needs.prepare-release.outputs.release_tag }} + RELEASE_NAME: ${{ needs.prepare-release.outputs.release_name }} + RELEASE_BODY: ${{ needs.prepare-release.outputs.release_body }} + MACOS_NOTARIZE: ${{ vars.MACOS_NOTARIZE || 'false' }} + OPENCODE_GITHUB_REPO: ${{ vars.OPENCODE_GITHUB_REPO || 'anomalyco/opencode' }} + + strategy: + fail-fast: false + matrix: + include: + - platform: macos-14 + os_type: macos + target: aarch64-apple-darwin + args: "--target aarch64-apple-darwin --bundles dmg,app" + - platform: macos-14 + os_type: macos + target: x86_64-apple-darwin + args: "--target x86_64-apple-darwin --bundles dmg,app" + - platform: ubuntu-22.04 + os_type: linux + target: x86_64-unknown-linux-gnu + args: "--target x86_64-unknown-linux-gnu --bundles deb,rpm" + - platform: ubuntu-22.04-arm + os_type: linux + target: aarch64-unknown-linux-gnu + args: "--target aarch64-unknown-linux-gnu --bundles deb,rpm" + - platform: windows-2022 + os_type: windows + target: x86_64-pc-windows-msvc + args: "--target x86_64-pc-windows-msvc --bundles msi" + + steps: + - name: Log runner selection + shell: bash + run: | + echo "Requested larger runner label: ${RUNNER_LABEL:-}" + echo "Effective runs-on: ${EFFECTIVE_RUNS_ON}" + env: + RUNNER_LABEL: ${{ vars.OPENWORK_LINUX_X64_RUNNER_LABEL }} + EFFECTIVE_RUNS_ON: ${{ matrix.target == 'x86_64-unknown-linux-gnu' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL != '' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL || matrix.platform }} + + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ github.sha }} + + - name: Enable git long paths (Windows) + if: matrix.os_type == 'windows' + shell: pwsh + run: git config --global core.longpaths true + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - 
name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: "1.3.6" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Install OpenTUI x64 core (macOS x86_64) + if: matrix.os_type == 'macos' && matrix.target == 'x86_64-apple-darwin' + run: pnpm add -w --ignore-workspace-root-check @opentui/core-darwin-x64@0.1.77 + + - name: Install Linux build dependencies + if: matrix.os_type == 'linux' + run: | + sudo apt-get update + sudo apt-get install -y \ + libgtk-3-dev \ + libglib2.0-dev \ + libayatana-appindicator3-dev \ + libsoup-3.0-dev \ + libwebkit2gtk-4.1-dev \ + libssl-dev \ + rpm \ + libdbus-1-dev \ + librsvg2-dev + + - name: Setup Rust + uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.target }} + + - name: Resolve OpenCode version + id: opencode-version + shell: bash + run: | + node <<'NODE' >> "$GITHUB_OUTPUT" + const fs = require('fs'); + const parsed = JSON.parse(fs.readFileSync('./constants.json', 'utf8')); + const version = String(parsed.opencodeVersion || '').replace(/^v/, '').trim(); + if (!version) { + throw new Error('Pinned OpenCode version is missing from constants.json'); + } + console.log('version=' + version); + NODE + + - name: Download OpenCode sidecar + shell: bash + env: + PINNED_OPENCODE_VERSION: ${{ steps.opencode-version.outputs.version }} + run: | + set -euo pipefail + + case "${{ matrix.target }}" in + aarch64-apple-darwin) + opencode_asset="opencode-darwin-arm64.zip" + ;; + x86_64-apple-darwin) + opencode_asset="opencode-darwin-x64-baseline.zip" + ;; + x86_64-unknown-linux-gnu) + opencode_asset="opencode-linux-x64-baseline.tar.gz" + ;; + aarch64-unknown-linux-gnu) + opencode_asset="opencode-linux-arm64.tar.gz" + ;; + x86_64-pc-windows-msvc) + opencode_asset="opencode-windows-x64-baseline.zip" + ;; + *) + echo "Unsupported target: ${{ matrix.target }}" + exit 1 + ;; + esac + + 
repo="${OPENCODE_GITHUB_REPO:-anomalyco/opencode}" + url="https://github.com/${repo}/releases/download/v${PINNED_OPENCODE_VERSION}/${opencode_asset}" + tmp_dir="$RUNNER_TEMP/opencode" + extract_dir="$tmp_dir/extracted" + rm -rf "$tmp_dir" + mkdir -p "$extract_dir" + curl -fsSL --retry 5 --retry-all-errors --retry-delay 2 -o "$tmp_dir/$opencode_asset" "$url" + + if [[ "$opencode_asset" == *.tar.gz ]]; then + tar -xzf "$tmp_dir/$opencode_asset" -C "$extract_dir" + else + if command -v unzip >/dev/null 2>&1; then + unzip -q "$tmp_dir/$opencode_asset" -d "$extract_dir" + elif command -v 7z >/dev/null 2>&1; then + 7z x "$tmp_dir/$opencode_asset" -o"$extract_dir" >/dev/null + else + echo "No unzip utility available" + exit 1 + fi + fi + + if [ -f "$extract_dir/opencode" ]; then + bin_path="$extract_dir/opencode" + elif [ -f "$extract_dir/opencode.exe" ]; then + bin_path="$extract_dir/opencode.exe" + else + echo "OpenCode binary not found in archive" + ls -la "$extract_dir" + exit 1 + fi + + target_name="opencode-${{ matrix.target }}" + if [ "${{ matrix.os_type }}" = "windows" ]; then + target_name="${target_name}.exe" + fi + + mkdir -p apps/desktop/src-tauri/sidecars + cp "$bin_path" "apps/desktop/src-tauri/sidecars/${target_name}" + chmod 755 "apps/desktop/src-tauri/sidecars/${target_name}" + + - name: Write notary API key + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + env: + APPLE_NOTARY_API_KEY_P8_BASE64: ${{ secrets.APPLE_NOTARY_API_KEY_P8_BASE64 }} + run: | + set -euo pipefail + + NOTARY_KEY_PATH="$RUNNER_TEMP/AuthKey.p8" + printf '%s' "$APPLE_NOTARY_API_KEY_P8_BASE64" | base64 --decode > "$NOTARY_KEY_PATH" + chmod 600 "$NOTARY_KEY_PATH" + + echo "NOTARY_KEY_PATH=$NOTARY_KEY_PATH" >> "$GITHUB_ENV" + + - name: Build + upload (notarized) + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + uses: tauri-apps/tauri-action@v0.5.17 + env: + CI: true + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Tauri updater signing + 
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + + # macOS signing + APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} + APPLE_CERTIFICATE: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + + # macOS notarization (App Store Connect API key) + APPLE_API_KEY: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_API_ISSUER: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_API_KEY_PATH: ${{ env.NOTARY_KEY_PATH }} + with: + tagName: ${{ env.RELEASE_TAG }} + releaseName: ${{ env.RELEASE_NAME }} + releaseBody: ${{ env.RELEASE_BODY }} + prerelease: true + releaseDraft: false + projectPath: apps/desktop + tauriScript: pnpm exec tauri -vvv + args: ${{ matrix.args }} + retryAttempts: 3 + + - name: Build + upload + if: matrix.os_type != 'macos' || env.MACOS_NOTARIZE != 'true' + uses: tauri-apps/tauri-action@v0.5.17 + env: + CI: true + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Tauri updater signing + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + + # macOS signing + APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} + APPLE_CERTIFICATE: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + with: + tagName: ${{ env.RELEASE_TAG }} + releaseName: ${{ env.RELEASE_NAME }} + releaseBody: ${{ env.RELEASE_BODY }} + prerelease: true + releaseDraft: false + projectPath: apps/desktop + tauriScript: pnpm exec tauri -vvv + args: ${{ matrix.args }} + retryAttempts: 3 diff --git a/.github/workflows/release-daytona-snapshot.yml b/.github/workflows/release-daytona-snapshot.yml new file mode 100644 index 0000000000..4d8baa3ac1 --- /dev/null +++ b/.github/workflows/release-daytona-snapshot.yml @@ -0,0 +1,172 @@ +name: 
Release Daytona Snapshot + +on: + workflow_call: + inputs: + tag: + description: "Tag to build from (e.g., v0.11.200). Defaults to current ref." + required: false + type: string + deploy_den: + description: "Whether to promote the published snapshot into the Den Render service" + required: false + type: boolean + default: true + snapshot_name: + description: "Optional explicit Daytona snapshot name" + required: false + type: string + snapshot_region: + description: "Optional Daytona region override for snapshot push" + required: false + type: string + workflow_dispatch: + inputs: + tag: + description: "Tag to build from (e.g., v0.11.200). Defaults to release tag/current ref." + required: false + type: string + deploy_den: + description: "Whether to promote the published snapshot into the Den Render service" + required: false + type: boolean + default: true + snapshot_name: + description: "Optional explicit Daytona snapshot name" + required: false + type: string + snapshot_region: + description: "Optional Daytona region override for snapshot push" + required: false + type: string + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ inputs.tag || github.ref_name }} + cancel-in-progress: false + +jobs: + publish-daytona-snapshot: + name: Build and Push Daytona Snapshot + runs-on: blacksmith-4vcpu-ubuntu-2404 + outputs: + release_tag: ${{ steps.resolve.outputs.release_tag }} + snapshot_name: ${{ steps.resolve.outputs.snapshot_name }} + snapshot_region: ${{ steps.resolve.outputs.snapshot_region }} + steps: + - name: Resolve release tag and snapshot name + id: resolve + shell: bash + env: + INPUT_TAG: ${{ inputs.tag }} + INPUT_SNAPSHOT_NAME: ${{ inputs.snapshot_name }} + INPUT_SNAPSHOT_REGION: ${{ inputs.snapshot_region }} + SNAPSHOT_NAME_BASE: ${{ vars.DAYTONA_SNAPSHOT_NAME_BASE }} + DEFAULT_SNAPSHOT_REGION: ${{ vars.DAYTONA_SNAPSHOT_REGION }} + run: | + set -euo pipefail + + tag="${INPUT_TAG:-}" + if [ -z "$tag" ]; then + 
tag="${GITHUB_REF_NAME}" + fi + if [[ "$tag" != v* ]]; then + tag="v${tag}" + fi + if [[ ! "$tag" =~ ^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?$ ]]; then + echo "Invalid release tag: $tag" >&2 + exit 1 + fi + + base_name="${SNAPSHOT_NAME_BASE:-openwork}" + if [ -n "${INPUT_SNAPSHOT_NAME:-}" ]; then + snapshot_name="${INPUT_SNAPSHOT_NAME}" + else + snapshot_name="${base_name}-${tag#v}" + fi + snapshot_region="${INPUT_SNAPSHOT_REGION:-${DEFAULT_SNAPSHOT_REGION:-}}" + + echo "release_tag=$tag" >> "$GITHUB_OUTPUT" + echo "snapshot_name=$snapshot_name" >> "$GITHUB_OUTPUT" + echo "snapshot_region=$snapshot_region" >> "$GITHUB_OUTPUT" + + - name: Checkout release source + uses: actions/checkout@v6 + with: + fetch-depth: 0 + ref: ${{ steps.resolve.outputs.release_tag }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Install Daytona CLI + shell: bash + run: | + set -euo pipefail + + case "$(uname -s)" in + Linux) platform="linux" ;; + Darwin) platform="darwin" ;; + *) + echo "Unsupported OS: $(uname -s)" >&2 + exit 1 + ;; + esac + + case "$(uname -m)" in + x86_64|amd64) arch="amd64" ;; + aarch64|arm64) arch="arm64" ;; + *) + echo "Unsupported architecture: $(uname -m)" >&2 + exit 1 + ;; + esac + + asset_name="daytona-${platform}-${arch}" + install_dir="$HOME/.local/bin" + mkdir -p "$install_dir" + + release_json="$(curl -fsSL https://api.github.com/repos/daytonaio/daytona/releases/latest)" + asset_url="$(python3 -c 'import json, sys; data = json.load(sys.stdin); name = sys.argv[1]; print(next(asset["browser_download_url"] for asset in data["assets"] if asset["name"] == name))' "$asset_name" <<<"$release_json")" + + curl -fL "$asset_url" -o "$install_dir/daytona" + chmod +x "$install_dir/daytona" + + echo "$install_dir" >> "$GITHUB_PATH" + export PATH="$install_dir:$PATH" + + daytona version + + - name: Build and push snapshot + shell: bash + env: + DAYTONA_API_KEY: ${{ secrets.DAYTONA_API_KEY }} + DAYTONA_API_URL: ${{ 
vars.DAYTONA_API_URL }} + DAYTONA_TARGET: ${{ vars.DAYTONA_TARGET }} + DAYTONA_SNAPSHOT_REGION: ${{ steps.resolve.outputs.snapshot_region }} + DAYTONA_SNAPSHOT_NAME: ${{ steps.resolve.outputs.snapshot_name }} + run: | + set -euo pipefail + + if [ -z "${DAYTONA_API_KEY:-}" ]; then + echo "Missing required secret: DAYTONA_API_KEY" >&2 + exit 1 + fi + + export DAYTONA_CONFIG_DIR="$RUNNER_TEMP/daytona" + mkdir -p "$DAYTONA_CONFIG_DIR" + daytona login --api-key "$DAYTONA_API_KEY" + + echo "Publishing Daytona snapshot: ${DAYTONA_SNAPSHOT_NAME}" + ./scripts/create-daytona-openwork-snapshot.sh "${DAYTONA_SNAPSHOT_NAME}" + + deploy-den: + name: Promote Daytona Snapshot to Den Render Service + needs: [publish-daytona-snapshot] + if: ${{ inputs.deploy_den }} + uses: ./.github/workflows/deploy-den.yml + with: + daytona_snapshot: ${{ needs.publish-daytona-snapshot.outputs.snapshot_name }} + secrets: inherit diff --git a/.github/workflows/release-macos-aarch64.yml b/.github/workflows/release-macos-aarch64.yml new file mode 100644 index 0000000000..f2c22e31a3 --- /dev/null +++ b/.github/workflows/release-macos-aarch64.yml @@ -0,0 +1,1167 @@ +name: Release App + +on: + push: + tags: + - "v*" + workflow_dispatch: + inputs: + tag: + description: "Tag to release (e.g., v0.1.2). Leave empty to use current ref." 
+ required: false + type: string + release_name: + description: "Release title (defaults to OpenWork )" + required: false + type: string + release_body: + description: "Release notes body in Markdown (defaults to a short placeholder)" + required: false + type: string + draft: + description: "Create the GitHub Release as a draft" + required: false + type: boolean + default: false + prerelease: + description: "Mark the GitHub Release as a prerelease" + required: false + type: boolean + default: false + notarize: + description: "Notarize macOS builds (requires Apple team configured)" + required: false + type: boolean + default: true + build_tauri: + description: "Build desktop (Tauri) artifacts" + required: false + type: boolean + default: true + publish_sidecars: + description: "Build + upload openwork-orchestrator sidecar release assets" + required: false + type: boolean + default: true + publish_npm: + description: "Publish openwork-orchestrator/openwork-server/opencode-router to npm if versions changed" + required: false + type: boolean + default: true + publish_daytona_snapshot: + description: "Build + push Daytona worker snapshot" + required: false + type: boolean + default: true + publish_electron: + description: "Build + publish Electron desktop artifacts (macOS/Linux/Windows) alongside Tauri" + required: false + type: boolean + default: false + +permissions: + contents: write + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + resolve-release: + name: Resolve Release Metadata + runs-on: blacksmith-4vcpu-ubuntu-2404 + outputs: + release_tag: ${{ steps.resolve.outputs.release_tag }} + release_name: ${{ steps.resolve.outputs.release_name }} + release_body: ${{ steps.resolve.outputs.release_body }} + draft: ${{ steps.resolve.outputs.draft }} + prerelease: ${{ steps.resolve.outputs.prerelease }} + notarize: ${{ steps.resolve.outputs.notarize }} + build_tauri: ${{ 
steps.resolve.outputs.build_tauri }} + publish_sidecars: ${{ steps.resolve.outputs.publish_sidecars }} + publish_npm: ${{ steps.resolve.outputs.publish_npm }} + publish_daytona_snapshot: ${{ steps.resolve.outputs.publish_daytona_snapshot }} + steps: + - name: Resolve metadata + id: resolve + shell: bash + env: + INPUT_TAG: ${{ github.event.inputs.tag }} + INPUT_RELEASE_NAME: ${{ github.event.inputs.release_name }} + INPUT_RELEASE_BODY: ${{ github.event.inputs.release_body }} + INPUT_DRAFT: ${{ github.event.inputs.draft }} + INPUT_PRERELEASE: ${{ github.event.inputs.prerelease }} + INPUT_NOTARIZE: ${{ github.event.inputs.notarize }} + INPUT_BUILD_TAURI: ${{ github.event.inputs.build_tauri }} + INPUT_PUBLISH_SIDECARS: ${{ github.event.inputs.publish_sidecars }} + INPUT_PUBLISH_NPM: ${{ github.event.inputs.publish_npm }} + INPUT_PUBLISH_DAYTONA_SNAPSHOT: ${{ github.event.inputs.publish_daytona_snapshot }} + DEFAULT_PUBLISH_SIDECARS: ${{ vars.RELEASE_PUBLISH_SIDECARS }} + DEFAULT_PUBLISH_NPM: ${{ vars.RELEASE_PUBLISH_NPM }} + DEFAULT_PUBLISH_DAYTONA_SNAPSHOT: ${{ vars.RELEASE_PUBLISH_DAYTONA_SNAPSHOT }} + DEFAULT_NOTARIZE: ${{ vars.MACOS_NOTARIZE }} + DEFAULT_BUILD_TAURI: ${{ vars.RELEASE_BUILD_TAURI }} + run: | + set -euo pipefail + + TAG_INPUT="${INPUT_TAG:-}" + if [ -n "$TAG_INPUT" ]; then + if [[ "$TAG_INPUT" == v* ]]; then + TAG="$TAG_INPUT" + else + TAG="v$TAG_INPUT" + fi + else + TAG="${GITHUB_REF_NAME}" + fi + + if [[ ! 
"$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?$ ]]; then + echo "Invalid release tag: $TAG (expected vX.Y.Z)" >&2 + exit 1 + fi + + RELEASE_NAME_INPUT="${INPUT_RELEASE_NAME:-}" + if [ -n "$RELEASE_NAME_INPUT" ]; then + RELEASE_NAME="$RELEASE_NAME_INPUT" + else + RELEASE_NAME="OpenWork $TAG" + fi + + RELEASE_BODY_INPUT="${INPUT_RELEASE_BODY:-}" + if [ -n "$RELEASE_BODY_INPUT" ]; then + RELEASE_BODY="$RELEASE_BODY_INPUT" + else + printf -v RELEASE_BODY '%s\n\nOpenWork %s desktop release.\n\n%s\n%s\n%s' \ + "## What's new" \ + "$TAG" \ + "- Download the attached installer for your platform." \ + "- Electron builds use the attached latest-mac.yml updater feed when Electron publishing is enabled." \ + "- Tauri builds use the attached latest.json updater manifest." + fi + + draft="${INPUT_DRAFT:-}" + if [ -z "$draft" ]; then + if [ "${GITHUB_EVENT_NAME}" = "push" ]; then + # Keep tag-triggered releases out of /releases/latest until assets + latest.json are ready. + draft="true" + else + draft="false" + fi + fi + prerelease="${INPUT_PRERELEASE:-false}" + notarize="${INPUT_NOTARIZE:-}" + if [ -z "$notarize" ]; then + notarize="${DEFAULT_NOTARIZE:-true}" + fi + + build_tauri="${INPUT_BUILD_TAURI:-}" + if [ -z "$build_tauri" ]; then + build_tauri="${DEFAULT_BUILD_TAURI:-true}" + fi + + publish_sidecars="${INPUT_PUBLISH_SIDECARS:-}" + if [ -z "$publish_sidecars" ]; then + publish_sidecars="${DEFAULT_PUBLISH_SIDECARS:-true}" + fi + publish_npm="${INPUT_PUBLISH_NPM:-}" + if [ -z "$publish_npm" ]; then + publish_npm="${DEFAULT_PUBLISH_NPM:-true}" + fi + + publish_daytona_snapshot="${INPUT_PUBLISH_DAYTONA_SNAPSHOT:-}" + if [ -z "$publish_daytona_snapshot" ]; then + publish_daytona_snapshot="${DEFAULT_PUBLISH_DAYTONA_SNAPSHOT:-true}" + fi + + TAG="${TAG//$'\n'/}" + TAG="${TAG//$'\r'/}" + RELEASE_NAME="${RELEASE_NAME//$'\n'/ }" + RELEASE_NAME="${RELEASE_NAME//$'\r'/ }" + + echo "release_tag=$TAG" >> "$GITHUB_OUTPUT" + echo "release_name=$RELEASE_NAME" >> "$GITHUB_OUTPUT" + 
echo "draft=$draft" >> "$GITHUB_OUTPUT" + echo "prerelease=$prerelease" >> "$GITHUB_OUTPUT" + echo "notarize=$notarize" >> "$GITHUB_OUTPUT" + echo "build_tauri=$build_tauri" >> "$GITHUB_OUTPUT" + echo "publish_sidecars=$publish_sidecars" >> "$GITHUB_OUTPUT" + echo "publish_npm=$publish_npm" >> "$GITHUB_OUTPUT" + echo "publish_daytona_snapshot=$publish_daytona_snapshot" >> "$GITHUB_OUTPUT" + { + echo "release_body<<__OPENWORK_RELEASE_BODY_EOF__" + printf '%s\n' "$RELEASE_BODY" + echo "__OPENWORK_RELEASE_BODY_EOF__" + } >> "$GITHUB_OUTPUT" + + - name: Create release if missing + shell: bash + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + + BODY_FILE="$RUNNER_TEMP/release_body.md" + printf '%s\n' "${{ steps.resolve.outputs.release_body }}" > "$BODY_FILE" + + if gh release view "${{ steps.resolve.outputs.release_tag }}" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + echo "Release ${{ steps.resolve.outputs.release_tag }} already exists; skipping create." + exit 0 + fi + + DRAFT_FLAG="" + PRERELEASE_FLAG="" + if [ "${{ steps.resolve.outputs.draft }}" = "true" ]; then + DRAFT_FLAG="--draft" + fi + if [ "${{ steps.resolve.outputs.prerelease }}" = "true" ]; then + PRERELEASE_FLAG="--prerelease" + fi + + gh release create "${{ steps.resolve.outputs.release_tag }}" \ + --repo "$GITHUB_REPOSITORY" \ + --title "${{ steps.resolve.outputs.release_name }}" \ + --notes-file "$BODY_FILE" \ + $DRAFT_FLAG $PRERELEASE_FLAG + + verify-release: + name: Verify Release Versions + needs: resolve-release + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Verify tag matches app versions + run: node scripts/release/verify-tag.mjs --tag "$RELEASE_TAG" + + - name: Release review (strict) + run: node 
scripts/release/review.mjs --strict + + publish-tauri: + name: Build + Publish (${{ matrix.target }}) + needs: [resolve-release, verify-release] + if: needs.resolve-release.outputs.build_tauri == 'true' + # Set OPENWORK_LINUX_X64_RUNNER_LABEL to route only the Linux x86_64 build to a larger runner. + runs-on: ${{ matrix.target == 'x86_64-unknown-linux-gnu' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL != '' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL || matrix.platform }} + timeout-minutes: 360 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + RELEASE_NAME: ${{ needs.resolve-release.outputs.release_name }} + RELEASE_BODY: ${{ needs.resolve-release.outputs.release_body }} + RELEASE_DRAFT: ${{ needs.resolve-release.outputs.draft }} + RELEASE_PRERELEASE: ${{ needs.resolve-release.outputs.prerelease }} + MACOS_NOTARIZE: ${{ needs.resolve-release.outputs.notarize }} + # Ensure Tauri's beforeBuildCommand (prepare:sidecar) uses our fork. + OPENCODE_GITHUB_REPO: ${{ vars.OPENCODE_GITHUB_REPO || 'anomalyco/opencode' }} + + strategy: + fail-fast: false + matrix: + include: + - platform: macos-14 + os_type: macos + target: aarch64-apple-darwin + args: "--target aarch64-apple-darwin --bundles dmg,app" + - platform: macos-14 + os_type: macos + target: x86_64-apple-darwin + args: "--target x86_64-apple-darwin --bundles dmg,app" + - platform: ubuntu-22.04 + os_type: linux + target: x86_64-unknown-linux-gnu + args: "--target x86_64-unknown-linux-gnu --bundles deb,rpm" + - platform: ubuntu-22.04-arm + os_type: linux + target: aarch64-unknown-linux-gnu + args: "--target aarch64-unknown-linux-gnu --bundles deb,rpm" + - platform: windows-2022 + os_type: windows + target: x86_64-pc-windows-msvc + args: "--target x86_64-pc-windows-msvc --bundles msi" + + steps: + - name: Log runner selection + shell: bash + run: | + echo "Requested larger runner label: ${RUNNER_LABEL:-}" + echo "Effective runs-on: ${EFFECTIVE_RUNS_ON}" + env: + RUNNER_LABEL: ${{ 
vars.OPENWORK_LINUX_X64_RUNNER_LABEL }} + EFFECTIVE_RUNS_ON: ${{ matrix.target == 'x86_64-unknown-linux-gnu' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL != '' && vars.OPENWORK_LINUX_X64_RUNNER_LABEL || matrix.platform }} + + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Enable git long paths (Windows) + if: matrix.os_type == 'windows' + shell: pwsh + run: git config --global core.longpaths true + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: "1.3.6" + + - name: Get pnpm store path + id: pnpm-store + shell: bash + run: echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" + + - name: Cache pnpm store + uses: actions/cache@v5 + continue-on-error: true + with: + path: ${{ steps.pnpm-store.outputs.path }} + key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm- + + - name: Cache cargo + uses: actions/cache@v5 + continue-on-error: true + with: + path: | + ~/.cargo/registry + ~/.cargo/git + apps/desktop/src-tauri/target + key: ${{ runner.os }}-cargo-${{ hashFiles('apps/desktop/src-tauri/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Install dependencies + run: pnpm install --frozen-lockfile --prefer-offline + + - name: Install OpenTUI x64 core (macOS x86_64) + if: matrix.os_type == 'macos' && matrix.target == 'x86_64-apple-darwin' + run: pnpm add -w --ignore-workspace-root-check @opentui/core-darwin-x64@0.1.77 + + - name: Install Linux build dependencies + if: matrix.os_type == 'linux' + run: | + sudo apt-get update + sudo apt-get install -y \ + libgtk-3-dev \ + libglib2.0-dev \ + libayatana-appindicator3-dev \ + libsoup-3.0-dev \ + libwebkit2gtk-4.1-dev \ + libssl-dev \ + rpm \ + libdbus-1-dev \ + librsvg2-dev + + - name: Setup Rust + 
uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.target }} + + - name: Resolve OpenCode version + id: opencode-version + shell: bash + run: | + node <<'NODE' >> "$GITHUB_OUTPUT" + const fs = require('fs'); + const parsed = JSON.parse(fs.readFileSync('./constants.json', 'utf8')); + const version = String(parsed.opencodeVersion || '').replace(/^v/, '').trim(); + if (!version) { + throw new Error('Pinned OpenCode version is missing from constants.json'); + } + console.log('version=' + version); + NODE + + - name: Download OpenCode sidecar + shell: bash + env: + PINNED_OPENCODE_VERSION: ${{ steps.opencode-version.outputs.version }} + OPENCODE_GITHUB_REPO: ${{ vars.OPENCODE_GITHUB_REPO || 'anomalyco/opencode' }} + run: | + set -euo pipefail + + case "${{ matrix.target }}" in + aarch64-apple-darwin) + opencode_asset="opencode-darwin-arm64.zip" + ;; + x86_64-apple-darwin) + opencode_asset="opencode-darwin-x64-baseline.zip" + ;; + x86_64-unknown-linux-gnu) + opencode_asset="opencode-linux-x64-baseline.tar.gz" + ;; + aarch64-unknown-linux-gnu) + opencode_asset="opencode-linux-arm64.tar.gz" + ;; + x86_64-pc-windows-msvc) + opencode_asset="opencode-windows-x64-baseline.zip" + ;; + *) + echo "Unsupported target: ${{ matrix.target }}" >&2 + exit 1 + ;; + esac + + repo="${OPENCODE_GITHUB_REPO:-anomalyco/opencode}" + url="https://github.com/${repo}/releases/download/v${PINNED_OPENCODE_VERSION}/${opencode_asset}" + tmp_dir="$RUNNER_TEMP/opencode" + extract_dir="$tmp_dir/extracted" + rm -rf "$tmp_dir" + mkdir -p "$extract_dir" + curl -fsSL --retry 5 --retry-all-errors --retry-delay 2 -o "$tmp_dir/$opencode_asset" "$url" + + if [[ "$opencode_asset" == *.tar.gz ]]; then + tar -xzf "$tmp_dir/$opencode_asset" -C "$extract_dir" + else + if command -v unzip >/dev/null 2>&1; then + unzip -q "$tmp_dir/$opencode_asset" -d "$extract_dir" + elif command -v 7z >/dev/null 2>&1; then + 7z x "$tmp_dir/$opencode_asset" -o"$extract_dir" >/dev/null + else + echo "No unzip utility 
available" >&2 + exit 1 + fi + fi + + if [ -f "$extract_dir/opencode" ]; then + bin_path="$extract_dir/opencode" + elif [ -f "$extract_dir/opencode.exe" ]; then + bin_path="$extract_dir/opencode.exe" + else + echo "OpenCode binary not found in archive" >&2 + ls -la "$extract_dir" + exit 1 + fi + + target_name="opencode-${{ matrix.target }}" + if [ "${{ matrix.os_type }}" = "windows" ]; then + target_name="${target_name}.exe" + fi + + mkdir -p apps/desktop/src-tauri/sidecars + cp "$bin_path" "apps/desktop/src-tauri/sidecars/${target_name}" + chmod 755 "apps/desktop/src-tauri/sidecars/${target_name}" + + - name: Write notary API key + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + env: + APPLE_NOTARY_API_KEY_P8_BASE64: ${{ secrets.APPLE_NOTARY_API_KEY_P8_BASE64 }} + APPLE_NOTARY_API_KEY_ID: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_NOTARY_API_ISSUER_ID: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_CODESIGN_CERT_P12_BASE64: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CODESIGN_CERT_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} + run: | + set -euo pipefail + + missing=() + for name in \ + APPLE_NOTARY_API_KEY_P8_BASE64 \ + APPLE_NOTARY_API_KEY_ID \ + APPLE_NOTARY_API_ISSUER_ID \ + APPLE_CODESIGN_CERT_P12_BASE64 \ + APPLE_CODESIGN_CERT_PASSWORD \ + APPLE_SIGNING_IDENTITY; do + if [ -z "${!name:-}" ]; then + missing+=("$name") + fi + done + if [ "${#missing[@]}" -gt 0 ]; then + printf 'Missing macOS notarization/signing secrets: %s\n' "${missing[*]}" >&2 + exit 1 + fi + + NOTARY_KEY_PATH="$RUNNER_TEMP/AuthKey.p8" + printf '%s' "$APPLE_NOTARY_API_KEY_P8_BASE64" | base64 --decode > "$NOTARY_KEY_PATH" + chmod 600 "$NOTARY_KEY_PATH" + + echo "NOTARY_KEY_PATH=$NOTARY_KEY_PATH" >> "$GITHUB_ENV" + + - name: Reject unnotarized macOS Tauri release + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE != 'true' + shell: bash + run: | + echo "macOS release assets must be 
notarized. Re-run with notarize=true or set MACOS_NOTARIZE=true." >&2 + exit 1 + + - name: Build + upload (notarized) + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a + env: + CI: true + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Tauri updater signing + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + + # macOS signing + APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} + APPLE_CERTIFICATE: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + + # macOS notarization (App Store Connect API key) + APPLE_API_KEY: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_API_ISSUER: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_API_KEY_PATH: ${{ env.NOTARY_KEY_PATH }} + with: + tagName: ${{ env.RELEASE_TAG }} + releaseName: ${{ env.RELEASE_NAME }} + releaseBody: ${{ env.RELEASE_BODY }} + releaseDraft: ${{ env.RELEASE_DRAFT == 'true' }} + prerelease: ${{ env.RELEASE_PRERELEASE == 'true' }} + projectPath: apps/desktop + tauriScript: pnpm exec tauri -vvv + args: ${{ matrix.args }} + retryAttempts: 3 + uploadUpdaterJson: false + updaterJsonPreferNsis: true + releaseAssetNamePattern: openwork-desktop-[platform]-[arch][ext] + + - name: Build + upload + if: matrix.os_type != 'macos' + uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a + env: + CI: true + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Tauri updater signing + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + + # macOS signing + APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} + APPLE_CERTIFICATE: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ 
secrets.APPLE_CODESIGN_CERT_PASSWORD }} + with: + tagName: ${{ env.RELEASE_TAG }} + releaseName: ${{ env.RELEASE_NAME }} + releaseBody: ${{ env.RELEASE_BODY }} + releaseDraft: ${{ env.RELEASE_DRAFT == 'true' }} + prerelease: ${{ env.RELEASE_PRERELEASE == 'true' }} + projectPath: apps/desktop + tauriScript: pnpm exec tauri -vvv + args: ${{ matrix.args }} + retryAttempts: 3 + uploadUpdaterJson: false + updaterJsonPreferNsis: true + releaseAssetNamePattern: openwork-desktop-[platform]-[arch][ext] + + - name: Verify versions.json bundled (macOS) + if: success() && matrix.os_type == 'macos' + shell: bash + run: | + set -euo pipefail + + tmp_dir="$RUNNER_TEMP/openwork-bundle-verify" + archive_path="apps/desktop/src-tauri/target/${{ matrix.target }}/release/bundle/macos/OpenWork.app.tar.gz" + + if [ ! -f "$archive_path" ]; then + echo "ERROR: updater archive missing from local build output: $archive_path" >&2 + exit 1 + fi + + rm -rf "$tmp_dir" + mkdir -p "$tmp_dir" + + tar -xzf "$archive_path" -C "$tmp_dir" + + app_path="$tmp_dir/OpenWork.app" + manifest_path="$app_path/Contents/Resources/versions.json" + + if [ ! 
-f "$manifest_path" ]; then + echo "ERROR: versions.json missing from app bundle: $manifest_path" >&2 + echo "Hint: ensure apps/desktop/src-tauri/tauri.conf.json bundles sidecars/versions.json as a resource" >&2 + exit 1 + fi + + echo "Found bundled versions.json at $manifest_path" + codesign --verify --deep --strict --verbose=2 "$app_path" + spctl -a -vv -t execute "$app_path" + xcrun stapler validate "$app_path" + + publish-updater-json: + name: Publish consolidated latest.json + needs: [resolve-release, verify-release, publish-tauri] + if: needs.resolve-release.outputs.build_tauri == 'true' + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Generate latest.json from release assets + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + node scripts/release/generate-latest-json.mjs \ + --tag "$RELEASE_TAG" \ + --repo "$GITHUB_REPOSITORY" \ + --output "$RUNNER_TEMP/latest.json" + + - name: Upload latest.json + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + gh release upload "$RELEASE_TAG" "$RUNNER_TEMP/latest.json#latest.json" \ + --repo "$GITHUB_REPOSITORY" \ + --clobber + + publish-electron: + name: Build + publish Electron (${{ matrix.artifact }}) + # Runs alongside the Tauri jobs so the same release carries both + # latest.json (Tauri updater) AND latest-*.yml (electron-updater). + # Gated on RELEASE_PUBLISH_ELECTRON=true (repo var) OR the workflow + # input of the same name so opt-in during rollout, opt-out if a + # non-migration release doesn't want Electron artifacts. 
+ needs: [resolve-release, verify-release] + if: ${{ vars.RELEASE_PUBLISH_ELECTRON == 'true' || github.event.inputs.publish_electron == 'true' }} + runs-on: ${{ matrix.platform }} + timeout-minutes: 120 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + MACOS_NOTARIZE: ${{ needs.resolve-release.outputs.notarize }} + + strategy: + fail-fast: false + matrix: + include: + - platform: macos-14 + os_type: macos + artifact: electron-macos-arm64 + electron_args: "--mac --arm64" + target_triple: aarch64-apple-darwin + - platform: macos-14 + os_type: macos + artifact: electron-macos-x64 + electron_args: "--mac --x64" + target_triple: x86_64-apple-darwin + - platform: ubuntu-22.04 + os_type: linux + artifact: electron-linux-x64 + electron_args: "--linux --x64" + target_triple: x86_64-unknown-linux-gnu + - platform: ubuntu-22.04-arm + os_type: linux + artifact: electron-linux-arm64 + electron_args: "--linux --arm64" + target_triple: aarch64-unknown-linux-gnu + - platform: windows-2022 + os_type: windows + artifact: electron-windows-x64 + electron_args: "--win --x64" + target_triple: x86_64-pc-windows-msvc + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Enable git long paths (Windows) + if: matrix.os_type == 'windows' + shell: pwsh + run: git config --global core.longpaths true + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.9 + + - name: Get pnpm store path + id: pnpm-store + shell: bash + run: echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" + + - name: Cache pnpm store + uses: actions/cache@v5 + continue-on-error: true + with: + path: ${{ steps.pnpm-store.outputs.path }} + key: ${{ runner.os }}-electron-pnpm-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + ${{ 
runner.os }}-electron-pnpm- + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Write Electron notary API key (macOS) + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + env: + APPLE_NOTARY_API_KEY_P8_BASE64: ${{ secrets.APPLE_NOTARY_API_KEY_P8_BASE64 }} + APPLE_NOTARY_API_KEY_ID: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_NOTARY_API_ISSUER_ID: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_CODESIGN_CERT_P12_BASE64: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + APPLE_CODESIGN_CERT_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + run: | + set -euo pipefail + + missing=() + for name in \ + APPLE_NOTARY_API_KEY_P8_BASE64 \ + APPLE_NOTARY_API_KEY_ID \ + APPLE_NOTARY_API_ISSUER_ID \ + APPLE_CODESIGN_CERT_P12_BASE64 \ + APPLE_CODESIGN_CERT_PASSWORD; do + if [ -z "${!name:-}" ]; then + missing+=("$name") + fi + done + if [ "${#missing[@]}" -gt 0 ]; then + printf 'Missing Electron macOS notarization/signing secrets: %s\n' "${missing[*]}" >&2 + exit 1 + fi + + NOTARY_KEY_PATH="$RUNNER_TEMP/AuthKey.p8" + printf '%s' "$APPLE_NOTARY_API_KEY_P8_BASE64" | base64 --decode > "$NOTARY_KEY_PATH" + chmod 600 "$NOTARY_KEY_PATH" + + echo "NOTARY_KEY_PATH=$NOTARY_KEY_PATH" >> "$GITHUB_ENV" + + - name: Reject unnotarized macOS Electron release + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE != 'true' + shell: bash + run: | + echo "macOS Electron release assets must be notarized. Re-run with notarize=true or set MACOS_NOTARIZE=true." >&2 + exit 1 + + - name: Build Electron app + shell: bash + env: + # TARGET tells prepare-sidecar.mjs which architecture's sidecars + # to download (critical for cross-arch builds like x64 on arm64 mac). 
+ TARGET: ${{ matrix.target_triple }} + run: pnpm --filter @openwork/desktop build:electron + + - name: Package + publish Electron (macOS, signed + notarized) + if: matrix.os_type == 'macos' && env.MACOS_NOTARIZE == 'true' + env: + CSC_LINK: ${{ secrets.APPLE_CODESIGN_CERT_P12_BASE64 }} + CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODESIGN_CERT_PASSWORD }} + APPLE_API_KEY: ${{ secrets.APPLE_NOTARY_API_KEY_ID }} + APPLE_API_ISSUER: ${{ secrets.APPLE_NOTARY_API_ISSUER_ID }} + APPLE_API_KEY_PATH: ${{ env.NOTARY_KEY_PATH }} + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + pnpm --dir apps/desktop exec electron-builder \ + --config electron-builder.yml \ + ${{ matrix.electron_args }} \ + --publish always + + - name: Package + publish Electron (Linux) + if: matrix.os_type == 'linux' + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + pnpm --dir apps/desktop exec electron-builder \ + --config electron-builder.yml \ + ${{ matrix.electron_args }} \ + --publish always + + - name: Package + publish Electron (Windows) + if: matrix.os_type == 'windows' + env: + GH_TOKEN: ${{ github.token }} + shell: bash + run: | + set -euo pipefail + pnpm --dir apps/desktop exec electron-builder \ + --config electron-builder.yml \ + ${{ matrix.electron_args }} \ + --publish always + + release-orchestrator-sidecars: + name: Build + Upload openwork-orchestrator Sidecars + needs: [resolve-release, verify-release] + if: needs.resolve-release.outputs.publish_sidecars == 'true' + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: "1.3.6" + + - name: Get pnpm store path + 
id: pnpm-store + shell: bash + run: echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" + + - name: Cache pnpm store + uses: actions/cache@v5 + continue-on-error: true + with: + path: ${{ steps.pnpm-store.outputs.path }} + key: ubuntu-pnpm-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + ubuntu-pnpm- + + - name: Install dependencies + run: pnpm install --frozen-lockfile --prefer-offline + + - name: Resolve sidecar versions + id: sidecar-versions + shell: bash + run: | + node -e "const fs=require('fs'); const orchestrator=JSON.parse(fs.readFileSync('apps/orchestrator/package.json','utf8')); const server=JSON.parse(fs.readFileSync('apps/server/package.json','utf8')); const opencodeRouter=JSON.parse(fs.readFileSync('apps/opencode-router/package.json','utf8')); console.log('orchestrator=' + orchestrator.version); console.log('server=' + server.version); console.log('opencodeRouter=' + opencodeRouter.version);" >> "$GITHUB_OUTPUT" + + - name: Resolve SOURCE_DATE_EPOCH + id: source-date + shell: bash + run: | + epoch=$(git show -s --format=%ct "${RELEASE_TAG}") + echo "epoch=$epoch" >> "$GITHUB_OUTPUT" + + - name: Check openwork-orchestrator release + id: orchestrator-release + shell: bash + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + tag="openwork-orchestrator-v${{ steps.sidecar-versions.outputs.orchestrator }}" + if gh release view "$tag" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + echo "exists=true" >> "$GITHUB_OUTPUT" + else + echo "exists=false" >> "$GITHUB_OUTPUT" + fi + + - name: Build orchestrator release artifacts + env: + SOURCE_DATE_EPOCH: ${{ steps.source-date.outputs.epoch }} + run: | + pnpm --filter openwork-orchestrator build:bin:all + pnpm --filter openwork-orchestrator build:sidecars + + - name: Release review (strict) + env: + SOURCE_DATE_EPOCH: ${{ steps.source-date.outputs.epoch }} + run: node scripts/release/review.mjs --strict + + - name: Create openwork-orchestrator release + if: 
steps.orchestrator-release.outputs.exists != 'true' + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + version="${{ steps.sidecar-versions.outputs.orchestrator }}" + tag="openwork-orchestrator-v${version}" + # printf -v turns \n into real newlines; a plain double-quoted + # assignment would ship literal "\n" text in the release notes. + printf -v notes 'Sidecar bundle for openwork-orchestrator v%s.\n\nopenwork-server: %s\nopencodeRouter: %s' \ + "${version}" \ + "${{ steps.sidecar-versions.outputs.server }}" \ + "${{ steps.sidecar-versions.outputs.opencodeRouter }}" + gh release create "$tag" \ + --repo "$GITHUB_REPOSITORY" \ + --title "openwork-orchestrator v${version}" \ + --notes "$notes" \ + --latest=false + + - name: Upload orchestrator release assets + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + tag="openwork-orchestrator-v${{ steps.sidecar-versions.outputs.orchestrator }}" + gh release upload "$tag" apps/orchestrator/dist/bin/* apps/orchestrator/dist/sidecars/* --repo "$GITHUB_REPOSITORY" --clobber + + publish-npm: + name: Publish npm packages + needs: [resolve-release, verify-release, release-orchestrator-sidecars] + if: | + always() && + needs.resolve-release.result == 'success' && + needs.verify-release.result == 'success' && + (needs.release-orchestrator-sidecars.result == 'success' || needs.release-orchestrator-sidecars.result == 'skipped') && + needs.resolve-release.outputs.publish_npm == 'true' + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + # Dispatch-based release recovery uses the workflow ref so fixes to + # npm release automation can run without moving an already-shipped tag. 
+ ref: ${{ github.event_name == 'workflow_dispatch' && github.ref_name || env.RELEASE_TAG }} + fetch-depth: 0 + + - name: Setup Node + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: "1.3.6" + + - name: Get pnpm store path + id: pnpm-store + shell: bash + run: echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" + + - name: Cache pnpm store + uses: actions/cache@v5 + continue-on-error: true + with: + path: ${{ steps.pnpm-store.outputs.path }} + key: ubuntu-pnpm-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + ubuntu-pnpm- + + - name: Install dependencies + run: pnpm install --frozen-lockfile --prefer-offline + + - name: Resolve package versions + id: package-versions + shell: bash + run: | + node -e "const fs=require('fs'); const orchestrator=JSON.parse(fs.readFileSync('apps/orchestrator/package.json','utf8')); const server=JSON.parse(fs.readFileSync('apps/server/package.json','utf8')); const opencodeRouter=JSON.parse(fs.readFileSync('apps/opencode-router/package.json','utf8')); console.log('orchestrator=' + orchestrator.version); console.log('server=' + server.version); console.log('opencodeRouter=' + opencodeRouter.version);" >> "$GITHUB_OUTPUT" + + - name: Check npm versions + id: npm-versions + shell: bash + env: + ORCHESTRATOR_VERSION: ${{ steps.package-versions.outputs.orchestrator }} + SERVER_VERSION: ${{ steps.package-versions.outputs.server }} + OPENCODE_ROUTER_VERSION: ${{ steps.package-versions.outputs.opencodeRouter }} + run: | + set -euo pipefail + # npm view exits non-zero for packages that don't exist yet (404). + # Treat missing packages as "not published" so release can publish them. 
+ orchestrator_current="$(npm view openwork-orchestrator version 2>/dev/null || true)" + server_current="$(npm view openwork-server version 2>/dev/null || true)" + opencodeRouter_current="$(npm view opencode-router version 2>/dev/null || true)" + + if [ "$orchestrator_current" = "$ORCHESTRATOR_VERSION" ]; then + echo "publish_orchestrator=false" >> "$GITHUB_OUTPUT" + else + echo "publish_orchestrator=true" >> "$GITHUB_OUTPUT" + fi + + if [ "$server_current" = "$SERVER_VERSION" ]; then + echo "publish_server=false" >> "$GITHUB_OUTPUT" + else + echo "publish_server=true" >> "$GITHUB_OUTPUT" + fi + + if [ "$opencodeRouter_current" = "$OPENCODE_ROUTER_VERSION" ]; then + echo "publish_opencodeRouter=false" >> "$GITHUB_OUTPUT" + else + echo "publish_opencodeRouter=true" >> "$GITHUB_OUTPUT" + fi + + publish_any=false + if [ "$orchestrator_current" != "$ORCHESTRATOR_VERSION" ] || [ "$server_current" != "$SERVER_VERSION" ] || [ "$opencodeRouter_current" != "$OPENCODE_ROUTER_VERSION" ]; then + publish_any=true + fi + echo "publish_any=$publish_any" >> "$GITHUB_OUTPUT" + + - name: Ensure npm auth + id: npm-auth + shell: bash + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + PUBLISH_ANY: ${{ steps.npm-versions.outputs.publish_any }} + run: | + set -euo pipefail + + if [ "${PUBLISH_ANY}" != "true" ]; then + echo "enabled=false" >> "$GITHUB_OUTPUT" + exit 0 + fi + + if [ -z "${NPM_TOKEN:-}" ]; then + echo "NPM_TOKEN not set; skipping npm publish." 
+ echo "enabled=false" >> "$GITHUB_OUTPUT" + exit 0 + fi + + npm config set //registry.npmjs.org/:_authToken "$NPM_TOKEN" + echo "enabled=true" >> "$GITHUB_OUTPUT" + + - name: Publish openwork-server + if: steps.npm-auth.outputs.enabled == 'true' && steps.npm-versions.outputs.publish_server == 'true' + run: pnpm --filter openwork-server publish --access public --no-git-checks + + - name: Publish opencode-router + if: steps.npm-auth.outputs.enabled == 'true' && steps.npm-versions.outputs.publish_opencodeRouter == 'true' + run: pnpm --filter opencode-router publish --access public --no-git-checks + + - name: Publish openwork-orchestrator + if: steps.npm-auth.outputs.enabled == 'true' && steps.npm-versions.outputs.publish_orchestrator == 'true' + env: + GH_TOKEN: ${{ github.token }} + ORCHESTRATOR_VERSION: ${{ steps.package-versions.outputs.orchestrator }} + run: | + set -euo pipefail + tag="openwork-orchestrator-v${ORCHESTRATOR_VERSION}" + if ! gh release view "$tag" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then + echo "openwork-orchestrator sidecar release $tag not found. Publish sidecars before openwork-orchestrator." 
>&2 + exit 1 + fi + pnpm --filter openwork-orchestrator build:bin:all + node apps/orchestrator/scripts/publish-npm.mjs + + publish-daytona-snapshot: + name: Build + Push Daytona Snapshot + needs: [resolve-release, verify-release, publish-npm] + if: | + always() && + needs.resolve-release.result == 'success' && + needs.verify-release.result == 'success' && + (needs.publish-npm.result == 'success' || needs.publish-npm.result == 'skipped') && + needs.resolve-release.outputs.publish_daytona_snapshot == 'true' + uses: ./.github/workflows/release-daytona-snapshot.yml + with: + tag: ${{ needs.resolve-release.outputs.release_tag }} + secrets: inherit + + aur-publish: + name: Publish AUR + needs: [resolve-release, publish-tauri, publish-release] + if: | + always() && + needs.resolve-release.result == 'success' && + (needs.publish-tauri.result == 'success' || needs.publish-tauri.result == 'skipped') && + (needs.publish-release.result == 'success' || needs.publish-release.result == 'skipped') + runs-on: blacksmith-4vcpu-ubuntu-2404 + permissions: + contents: write + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + steps: + - name: Checkout dev + uses: actions/checkout@v6 + with: + ref: dev + fetch-depth: 0 + + - name: Update AUR packaging files + run: scripts/aur/update-aur.sh "$RELEASE_TAG" + + - name: Commit packaging update to dev + shell: bash + run: | + set -euo pipefail + + if ! git status --porcelain -- packaging/aur/PKGBUILD packaging/aur/.SRCINFO | grep -q .; then + echo "AUR packaging already up to date in dev." 
+ exit 0 + fi + + version="${RELEASE_TAG#v}" + git add packaging/aur/PKGBUILD packaging/aur/.SRCINFO + git -c user.name="OpenWork Release Bot" \ + -c user.email="release-bot@users.noreply.github.com" \ + commit -m "chore(aur): update PKGBUILD for ${version}" + git push origin HEAD:dev + + - name: Publish to AUR + env: + AUR_SSH_PRIVATE_KEY: ${{ secrets.AUR_SSH_PRIVATE_KEY }} + AUR_REPO: ${{ vars.AUR_REPO || 'openwork' }} + AUR_SKIP_UPDATE: "1" + run: | + set -euo pipefail + if [ -z "${AUR_SSH_PRIVATE_KEY:-}" ]; then + echo "AUR_SSH_PRIVATE_KEY not set; skipping publish to AUR." + exit 0 + fi + scripts/aur/publish-aur.sh "$RELEASE_TAG" + + publish-release: + name: Publish GitHub Release + needs: + - resolve-release + - verify-release + - publish-tauri + - publish-updater-json + - publish-electron + - release-orchestrator-sidecars + - publish-npm + - publish-daytona-snapshot + if: | + always() && + needs.resolve-release.outputs.draft == 'true' && + needs.resolve-release.result == 'success' && + needs.verify-release.result == 'success' && + (needs.publish-tauri.result == 'success' || needs.publish-tauri.result == 'skipped') && + (needs.publish-updater-json.result == 'success' || needs.publish-updater-json.result == 'skipped') && + (needs.publish-electron.result == 'success' || needs.publish-electron.result == 'skipped') && + (needs.release-orchestrator-sidecars.result == 'success' || needs.release-orchestrator-sidecars.result == 'skipped') && + (needs.publish-npm.result == 'success' || needs.publish-npm.result == 'skipped') && + (needs.publish-daytona-snapshot.result == 'success' || needs.publish-daytona-snapshot.result == 'skipped') + runs-on: blacksmith-4vcpu-ubuntu-2404 + env: + RELEASE_TAG: ${{ needs.resolve-release.outputs.release_tag }} + RELEASE_PRERELEASE: ${{ needs.resolve-release.outputs.prerelease }} + steps: + - name: Publish release after assets are ready + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + + if [ "${RELEASE_PRERELEASE}" 
= "true" ]; then + gh release edit "$RELEASE_TAG" --repo "$GITHUB_REPOSITORY" --draft=false --prerelease + else + gh release edit "$RELEASE_TAG" --repo "$GITHUB_REPOSITORY" --draft=false --latest + fi diff --git a/.github/workflows/windows-signed-artifacts.yml b/.github/workflows/windows-signed-artifacts.yml new file mode 100644 index 0000000000..149290e5d5 --- /dev/null +++ b/.github/workflows/windows-signed-artifacts.yml @@ -0,0 +1,122 @@ +name: Windows Signed Artifacts + +on: + workflow_dispatch: + inputs: + ref: + description: Git ref to build + required: false + type: string + +permissions: + contents: read + +jobs: + build-and-sign-windows: + name: Build and sign Windows artifacts + runs-on: windows-latest + env: + TAURI_TARGET: x86_64-pc-windows-msvc + BUN_TARGET: bun-windows-x64 + WINDOWS_SIGNING_CERT_PASSWORD: ${{ secrets.WINDOWS_CERT_PASSWORD }} + WINDOWS_TIMESTAMP_URL: ${{ secrets.WINDOWS_TIMESTAMP_URL || 'http://timestamp.digicert.com' }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.ref || github.ref }} + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: Setup Bun + uses: oven-sh/setup-bun@v1 + with: + bun-version: 1.3.10 + + - name: Setup Rust + uses: dtolnay/rust-toolchain@stable + with: + targets: x86_64-pc-windows-msvc + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Prepare sidecars + run: pnpm -C apps/desktop prepare:sidecar + + - name: Import Windows signing certificate + shell: pwsh + env: + WINDOWS_CERT_PFX_BASE64: ${{ secrets.WINDOWS_CERT_PFX_BASE64 }} + run: | + if ([string]::IsNullOrWhiteSpace($env:WINDOWS_CERT_PFX_BASE64)) { + throw "WINDOWS_CERT_PFX_BASE64 is required for Windows signing." + } + if ([string]::IsNullOrWhiteSpace($env:WINDOWS_SIGNING_CERT_PASSWORD)) { + throw "WINDOWS_CERT_PASSWORD is required for Windows signing." 
+ } + $bytes = [Convert]::FromBase64String($env:WINDOWS_CERT_PFX_BASE64) + $certPath = Join-Path $env:RUNNER_TEMP "windows-codesign.pfx" + [IO.File]::WriteAllBytes($certPath, $bytes) + "WINDOWS_CERT_PATH=$certPath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + + - name: Sign bundled Windows sidecars + shell: pwsh + run: | + $targets = @( + "apps/desktop/src-tauri/sidecars/opencode-$env:TAURI_TARGET.exe", + "apps/desktop/src-tauri/sidecars/opencode-router-$env:TAURI_TARGET.exe", + "apps/desktop/src-tauri/sidecars/openwork-server-v2-$env:TAURI_TARGET.exe" + ) + foreach ($target in $targets) { + if (!(Test-Path $target)) { + throw "Expected Windows sidecar missing: $target" + } + signtool sign /fd SHA256 /td SHA256 /tr $env:WINDOWS_TIMESTAMP_URL /f $env:WINDOWS_CERT_PATH /p $env:WINDOWS_SIGNING_CERT_PASSWORD $target + } + + - name: Build embedded Server V2 runtime + run: pnpm --filter openwork-server-v2 build:bin:embedded:windows --bundle-dir ../desktop/src-tauri/sidecars + working-directory: apps/server-v2 + + - name: Sign Server V2 executable + shell: pwsh + run: | + $serverPath = "apps/server-v2/dist/bin/openwork-server-v2-$env:BUN_TARGET.exe" + if (!(Test-Path $serverPath)) { + throw "Expected Server V2 executable missing: $serverPath" + } + signtool sign /fd SHA256 /td SHA256 /tr $env:WINDOWS_TIMESTAMP_URL /f $env:WINDOWS_CERT_PATH /p $env:WINDOWS_SIGNING_CERT_PASSWORD $serverPath + signtool verify /pa /v $serverPath + + - name: Build desktop Windows bundle + run: pnpm --filter @openwork/desktop exec tauri build --target x86_64-pc-windows-msvc + + - name: Sign desktop Windows artifacts + shell: pwsh + run: | + $artifacts = Get-ChildItem -Path "apps/desktop/src-tauri/target/x86_64-pc-windows-msvc/release/bundle" -Recurse -Include *.exe,*.msi + if ($artifacts.Count -eq 0) { + throw "No Windows desktop artifacts were produced to sign." 
+ } + foreach ($artifact in $artifacts) { + signtool sign /fd SHA256 /td SHA256 /tr $env:WINDOWS_TIMESTAMP_URL /f $env:WINDOWS_CERT_PATH /p $env:WINDOWS_SIGNING_CERT_PASSWORD $artifact.FullName + signtool verify /pa /v $artifact.FullName + } + + - name: Upload signed artifacts + uses: actions/upload-artifact@v4 + with: + name: windows-signed-artifacts + path: | + apps/server-v2/dist/bin/openwork-server-v2-*.exe + apps/desktop/src-tauri/target/x86_64-pc-windows-msvc/release/bundle/**/*.exe + apps/desktop/src-tauri/target/x86_64-pc-windows-msvc/release/bundle/**/*.msi diff --git a/.gitignore b/.gitignore index 1581ebe143..0e45300eac 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,49 @@ +.turbo/ node_modules/ +apps/desktop/dist-electron/ +packages/*/node_modules/ +apps/*/node_modules/ +ee/apps/*/node_modules/ +ee/packages/*/node_modules/ .next/ out/ dist/ +packages/*/dist/ +apps/*/dist/ +ee/apps/*/dist/ +ee/packages/*/dist/ +tmp/ + +# Local git worktrees +_worktrees/ # Tauri/Rust -src-tauri/target/ +packages/desktop/src-tauri/target/ +packages/desktop/src-tauri/sidecars/ +apps/desktop/src-tauri/target/ +apps/desktop/src-tauri/sidecars/ +apps/desktop/resources/sidecars/ # Env .env .env.* +!.env.example +# The migration-release fragment is committed only on the tagged +# migration-release commit and removed by 03-post-migration-cleanup.mjs. +# Allow git to see it so `cut-migration-release` can commit it. 
+!.env.migration-release + +# Bun build artifacts +*.bun-build +apps/server/cli +apps/server-v2/openapi/openapi.json +packages/openwork-server-sdk/generated/ + +# pnpm store (created by Docker volume mounts) +.pnpm-store/ + +# Docker dev workspace (ephemeral mount point) +packaging/docker/workspace/ # OS .DS_Store @@ -19,3 +54,9 @@ vendor/opencode/ # OpenCode local deps .opencode/node_modules/ .opencode/bun.lock + +# OpenWork workspace-local artifacts +.opencode/openwork/ +.vercel +.env*.local +.claude/* diff --git a/.infisical.json b/.infisical.json new file mode 100644 index 0000000000..c91c16c84f --- /dev/null +++ b/.infisical.json @@ -0,0 +1,5 @@ +{ + "workspaceId": "e9f4542a-8714-46c3-a8fd-99d8cb370aeb", + "defaultEnvironment": "", + "gitBranchToEnvironmentMapping": null +} \ No newline at end of file diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000000..6c2b9be4c4 --- /dev/null +++ b/.npmrc @@ -0,0 +1,2 @@ +link-workspace-packages=true +prefer-workspace-packages=true diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000000..a45fd52cc5 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +24 diff --git a/.opencode/agent/css.md b/.opencode/agent/css.md new file mode 100644 index 0000000000..d5e68c7bf6 --- /dev/null +++ b/.opencode/agent/css.md @@ -0,0 +1,149 @@ +--- +description: use whenever you are styling a ui with css +--- + +you are very good at writing clean maintainable css using modern techniques + +css is structured like this + +```css +[data-page="home"] { + [data-component="header"] { + [data-slot="logo"] { + } + } +} +``` + +top level pages are scoped using `data-page` + +pages can break down into components using `data-component` + +components can break down into slots using `data-slot` + +structure things so that this hierarchy is followed IN YOUR CSS - you should rarely need to +nest components inside other components. you should NEVER nest components inside +slots. you should NEVER nest slots inside other slots. 
+ +**IMPORTANT: This hierarchy rule applies to CSS structure, NOT JSX/DOM structure.** + +The hierarchy in css file does NOT have to match the hierarchy in the dom - you +can put components or slots at the same level in CSS even if one goes inside another in the DOM. + +Your JSX can nest however makes semantic sense - components can be inside slots, +slots can contain components, etc. The DOM structure should be whatever makes the most +semantic and functional sense. + +It is more important to follow the pages -> components -> slots structure IN YOUR CSS, +while keeping your JSX/DOM structure logical and semantic. + +use data attributes to represent different states of the component + +```css +[data-component="modal"] { + opacity: 0; + + &[data-state="open"] { + opacity: 1; + } +} +``` + +this will allow jsx to control the styling + +avoid selectors that just target an element type like `> span` you should assign +it a slot name. it's ok to do this sometimes where it makes sense semantically +like targeting `li` elements in a list + +in terms of file structure `./src/style/` contains all universal styling rules. +these should not contain anything specific to a page + +`./src/style/token` contains all the tokens used in the project + +`./src/style/component` is for reusable components like buttons or inputs + +page specific styles should go next to the page they are styling so +`./src/routes/about.tsx` should have its styles in `./src/routes/about.css` + +`about.css` should be scoped using `data-page="about"` + +## Example of correct implementation + +JSX can nest however makes sense semantically: + +```jsx +
+
Section Title
+
Content here
+
+``` + +CSS maintains clean hierarchy regardless of DOM nesting: + +```css +[data-page="home"] { + [data-component="screenshots"] { + [data-slot="left"] { + /* styles */ + } + [data-slot="content"] { + /* styles */ + } + } + + [data-component="title"] { + /* can be at same level even though nested in DOM */ + } +} +``` + +## Reusable Components + +If a component is reused across multiple sections of the same page, define it at the page level: + +```jsx + +
+
+

npm

+
+
+

bun

+
+
+ +
+
+
Screenshot Title
+
+
+``` + +```css +[data-page="home"] { + /* Reusable title component defined at page level since it's used in multiple components */ + [data-component="title"] { + text-transform: uppercase; + font-weight: 400; + } + + [data-component="install"] { + /* install-specific styles */ + } + + [data-component="screenshots"] { + /* screenshots-specific styles */ + } +} +``` + +This is correct because the `title` component has consistent styling and behavior across the page. + +## Key Clarifications + +1. **JSX Nesting is Flexible**: Components can be nested inside slots, slots can contain components - whatever makes semantic sense +2. **CSS Hierarchy is Strict**: Follow pages → components → slots structure in CSS +3. **Reusable Components**: Define at the appropriate level where they're shared (page level if used across the page, component level if only used within that component) +4. **DOM vs CSS Structure**: These don't need to match - optimize each for its purpose + +See ./src/routes/index.css and ./src/routes/index.tsx for a complete example. 
diff --git a/.opencode/agent/docs.md b/.opencode/agent/docs.md new file mode 100644 index 0000000000..21cfc6a16e --- /dev/null +++ b/.opencode/agent/docs.md @@ -0,0 +1,34 @@ +--- +description: ALWAYS use this when writing docs +color: "#38A3EE" +--- + +You are an expert technical documentation writer + +You are not verbose + +Use a relaxed and friendly tone + +The title of the page should be a word or a 2-3 word phrase + +The description should be one short line, should not start with "The", should +avoid repeating the title of the page, should be 5-10 words long + +Chunks of text should not be more than 2 sentences long + +Each section is separated by a divider of 3 dashes + +The section titles are short with only the first letter of the word capitalized + +The section titles are in the imperative mood + +The section titles should not repeat the term used in the page title, for +example, if the page title is "Models", avoid using a section title like "Add +new models". This might be unavoidable in some cases, but try to avoid it. + +Check out the /packages/web/src/content/docs/docs/index.mdx as an example. + +For JS or TS code snippets remove trailing semicolons and any trailing commas +that might not be needed. + +If you are making a commit prefix the commit message with `docs:` diff --git a/.opencode/agent/duplicate-pr.md b/.opencode/agent/duplicate-pr.md new file mode 100644 index 0000000000..c9c932ef79 --- /dev/null +++ b/.opencode/agent/duplicate-pr.md @@ -0,0 +1,26 @@ +--- +mode: primary +hidden: true +model: opencode/claude-haiku-4-5 +color: "#E67E22" +tools: + "*": false + "github-pr-search": true +--- + +You are a duplicate PR detection agent. When a PR is opened, your job is to search for potentially duplicate or related open PRs. + +Use the github-pr-search tool to search for PRs that might be addressing the same issue or feature. + +IMPORTANT: The input will contain a line `CURRENT_PR_NUMBER: NNNN`. 
This is the current PR number, you should not mark that the current PR as a duplicate of itself. + +Search using keywords from the PR title and description. Try multiple searches with different relevant terms. + +If you find potential duplicates: + +- List them with their titles and URLs +- Briefly explain why they might be related + +If no duplicates are found, say so clearly. BUT ONLY SAY "No duplicate PRs found" (don't say anything else if no dups) + +Keep your response concise and actionable. diff --git a/.opencode/agent/triage.md b/.opencode/agent/triage.md new file mode 100644 index 0000000000..5d1147a885 --- /dev/null +++ b/.opencode/agent/triage.md @@ -0,0 +1,78 @@ +--- +mode: primary +hidden: true +model: opencode/claude-haiku-4-5 +color: "#44BA81" +tools: + "*": false + "github-triage": true +--- + +You are a triage agent responsible for triaging github issues. + +Use your github-triage tool to triage issues. + +## Labels + +### windows + +Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows. + +- Use if they mention WSL too + +#### perf + +Performance-related issues: + +- Slow performance +- High RAM usage +- High CPU usage + +**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness. + +#### desktop + +Desktop app issues: + +- `opencode web` command +- The desktop app itself + +**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues. + +#### nix + +**Only** add if the issue explicitly mentions nix. + +#### zen + +**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black". + +If the issue doesn't have "zen" or "opencode black" in it then don't add zen label + +#### docs + +Add if the issue requests better documentation or docs updates. 
+ +#### opentui + +TUI issues potentially caused by our underlying TUI library: + +- Keybindings not working +- Scroll speed issues (too fast/slow/laggy) +- Screen flickering +- Crashes with opentui in the log + +**Do not** add for general TUI bugs. + +When assigning to people here are the following rules: + +adamdotdev: +ONLY assign adam if the issue will have the "desktop" label. + +fwang: +ONLY assign fwang if the issue will have the "zen" label. + +jayair: +ONLY assign jayair if the issue will have the "docs" label. + +In all other cases use best judgment. Avoid assigning to kommander needlessly, when in doubt assign to rekram1-node. diff --git a/.opencode/commands/browser-setup.md b/.opencode/commands/browser-setup.md new file mode 100644 index 0000000000..240878a971 --- /dev/null +++ b/.opencode/commands/browser-setup.md @@ -0,0 +1,9 @@ +--- +name: browser-setup +description: Guide user through Chrome browser automation setup +--- + +Help the user set up browser automation. + +Use the `browser-setup-devtools` skill and follow it strictly (Chrome DevTools MCP only). +Keep the user prompt minimal and let the skill drive the setup dance. diff --git a/.opencode/commands/hello-stranger.md b/.opencode/commands/hello-stranger.md new file mode 100644 index 0000000000..47802aa711 --- /dev/null +++ b/.opencode/commands/hello-stranger.md @@ -0,0 +1,4 @@ +--- +description: Say hello stranger +--- +hello stranger diff --git a/.opencode/commands/release.md b/.opencode/commands/release.md new file mode 100644 index 0000000000..9b78a6087e --- /dev/null +++ b/.opencode/commands/release.md @@ -0,0 +1,22 @@ +--- +description: Run the OpenWork release flow +--- + +You are running the OpenWork release flow in this repo. + +Arguments: `$ARGUMENTS` +- If empty, default to a patch release. +- If set to `minor` or `major`, use that bump type. + +Do the following, in order, and stop on any failure: + +1. Sync `dev` and ensure the working tree is clean. +2. 
Bump app/desktop versions using `pnpm bump:$ARGUMENTS` (or `pnpm bump:patch` if empty). +3. If any dependencies were pinned or changed, run `pnpm install --lockfile-only`. +4. Run `pnpm release:review` and resolve any mismatches. +5. Tag and push: `git tag vX.Y.Z` and `git push origin vX.Y.Z`, then `git push origin dev`. +6. Watch the Release App GitHub Actions workflow to completion. +7. If releasing openwork-orchestrator sidecars, build deterministically with `SOURCE_DATE_EPOCH`, upload assets to `openwork-orchestrator-vX.Y.Z`, and publish `openwork-orchestrator`. +8. If `openwork-server` or `opencode-router` versions changed, publish those packages. + +Report what you changed, the tag created, and the GHA status. diff --git a/.opencode/openwork.json b/.opencode/openwork.json new file mode 100644 index 0000000000..5cf9aba26b --- /dev/null +++ b/.opencode/openwork.json @@ -0,0 +1,5 @@ +{ + "messaging": { + "enabled": true + } +} diff --git a/.opencode/package-lock.json b/.opencode/package-lock.json new file mode 100644 index 0000000000..46ddb1c8a0 --- /dev/null +++ b/.opencode/package-lock.json @@ -0,0 +1,376 @@ +{ + "name": ".opencode", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "@opencode-ai/plugin": "1.4.9" + } + }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", + "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz", + "integrity": 
"sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz", + "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz", + "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz", + "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz", + "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@opencode-ai/plugin": { + "version": "1.4.9", + "resolved": 
"https://registry.npmjs.org/@opencode-ai/plugin/-/plugin-1.4.9.tgz", + "integrity": "sha512-tUtPbPs5xP9wonwuz5d/2y8QTrqFR8HOtAVTXvZ6iG26NJfW0dnnw9oTusVOayEIemd5abytCESm7X9ZZOMftQ==", + "license": "MIT", + "dependencies": { + "@opencode-ai/sdk": "1.4.9", + "effect": "4.0.0-beta.48", + "zod": "4.1.8" + }, + "peerDependencies": { + "@opentui/core": ">=0.1.100", + "@opentui/solid": ">=0.1.100" + }, + "peerDependenciesMeta": { + "@opentui/core": { + "optional": true + }, + "@opentui/solid": { + "optional": true + } + } + }, + "node_modules/@opencode-ai/sdk": { + "version": "1.4.9", + "resolved": "https://registry.npmjs.org/@opencode-ai/sdk/-/sdk-1.4.9.tgz", + "integrity": "sha512-S8WQLuBFu2WwvSc1wupsV4qskniBA+JN1VaZZs52BPWwiN2zQFTD5/6dMh6oiYOMDtPjKsTFZ6qLFxDvVPNggQ==", + "license": "MIT", + "dependencies": { + "cross-spawn": "7.0.6" + } + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/effect": { + "version": "4.0.0-beta.48", + "resolved": 
"https://registry.npmjs.org/effect/-/effect-4.0.0-beta.48.tgz", + "integrity": "sha512-MMAM/ZabuNdNmgXiin+BAanQXK7qM8mlt7nfXDoJ/Gn9V8i89JlCq+2N0AiWmqFLXjGLA0u3FjiOjSOYQk5uMw==", + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.1.0", + "fast-check": "^4.6.0", + "find-my-way-ts": "^0.1.6", + "ini": "^6.0.0", + "kubernetes-types": "^1.30.0", + "msgpackr": "^1.11.9", + "multipasta": "^0.2.7", + "toml": "^4.1.1", + "uuid": "^13.0.0", + "yaml": "^2.8.3" + } + }, + "node_modules/fast-check": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-4.7.0.tgz", + "integrity": "sha512-NsZRtqvSSoCP0HbNjUD+r1JH8zqZalyp6gLY9e7OYs7NK9b6AHOs2baBFeBG7bVNsuoukh89x2Yg3rPsul8ziQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT", + "dependencies": { + "pure-rand": "^8.0.0" + }, + "engines": { + "node": ">=12.17.0" + } + }, + "node_modules/find-my-way-ts": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/find-my-way-ts/-/find-my-way-ts-0.1.6.tgz", + "integrity": "sha512-a85L9ZoXtNAey3Y6Z+eBWW658kO/MwR7zIafkIUPUMf3isZG0NCs2pjW2wtjxAKuJPxMAsHUIP4ZPGv0o5gyTA==", + "license": "MIT" + }, + "node_modules/ini": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-6.0.0.tgz", + "integrity": "sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ==", + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/kubernetes-types": { + "version": "1.30.0", + "resolved": 
"https://registry.npmjs.org/kubernetes-types/-/kubernetes-types-1.30.0.tgz", + "integrity": "sha512-Dew1okvhM/SQcIa2rcgujNndZwU8VnSapDgdxlYoB84ZlpAD43U6KLAFqYo17ykSFGHNPrg0qry0bP+GJd9v7Q==", + "license": "Apache-2.0" + }, + "node_modules/msgpackr": { + "version": "1.11.10", + "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.10.tgz", + "integrity": "sha512-iCZNq+HszvF+fC3anCm4nBmWEnbeIAfpDs6IStAEKhQ2YSgkjzVG2FF9XJqwwQh5bH3N9OUTUt4QwVN6MLMLtA==", + "license": "MIT", + "optionalDependencies": { + "msgpackr-extract": "^3.0.2" + } + }, + "node_modules/msgpackr-extract": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz", + "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "node-gyp-build-optional-packages": "5.2.2" + }, + "bin": { + "download-msgpackr-prebuilds": "bin/download-prebuilds.js" + }, + "optionalDependencies": { + "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" + } + }, + "node_modules/multipasta": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/multipasta/-/multipasta-0.2.7.tgz", + "integrity": "sha512-KPA58d68KgGil15oDqXjkUBEBYc00XvbPj5/X+dyzeo/lWm9Nc25pQRlf1D+gv4OpK7NM0J1odrbu9JNNGvynA==", + "license": "MIT" + }, + "node_modules/node-gyp-build-optional-packages": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz", + "integrity": 
"sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==", + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^2.0.1" + }, + "bin": { + "node-gyp-build-optional-packages": "bin.js", + "node-gyp-build-optional-packages-optional": "optional.js", + "node-gyp-build-optional-packages-test": "build-test.js" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/pure-rand": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-8.4.0.tgz", + "integrity": "sha512-IoM8YF/jY0hiugFo/wOWqfmarlE6J0wc6fDK1PhftMk7MGhVZl88sZimmqBBFomLOCSmcCCpsfj7wXASCpvK9A==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/toml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/toml/-/toml-4.1.1.tgz", + "integrity": 
"sha512-EBJnVBr3dTXdA89WVFoAIPUqkBjxPMwRqsfuo1r240tKFHXv3zgca4+NJib/h6TyvGF7vOawz0jGuryJCdNHrw==", + "license": "MIT", + "engines": { + "node": ">=20" + } + }, + "node_modules/uuid": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-13.0.0.tgz", + "integrity": "sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist-node/bin/uuid" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/yaml": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.3.tgz", + "integrity": "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + }, + "node_modules/zod": { + "version": "4.1.8", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/.opencode/skill/release/SKILL.md b/.opencode/skill/release/SKILL.md deleted file mode 100644 index ba630cd348..0000000000 --- a/.opencode/skill/release/SKILL.md +++ /dev/null @@ -1,97 +0,0 @@ -# release - -Create a human-friendly **unsigned macOS DMG** release for OpenWork (or similar Tauri apps), and publish it on GitHub. - -This skill is intentionally lightweight: it’s mostly a checklist + a couple of sanity scripts. - -## What this skill is for - -- You have a Tauri app. 
-- You want to publish a **DMG** on GitHub Releases. -- You are **not** code signing / notarizing yet (so macOS will warn users). - -## Prereqs - -- `pnpm` -- Rust toolchain (`cargo`, `rustc`) -- `gh` authenticated (`gh auth status`) -- macOS tools: `codesign`, `spctl`, `hdiutil` - -## Release checklist (recommended) - -### 1) Clean working tree - -```bash -git status -``` - -### 2) Bump version everywhere - -- `package.json` (`version`) -- `src-tauri/tauri.conf.json` (`version`) -- `src-tauri/Cargo.toml` (`version`) - -### 3) Validate builds - -```bash -pnpm typecheck -pnpm build:web -cargo check --manifest-path src-tauri/Cargo.toml -``` - -### 4) Build DMG - -```bash -pnpm tauri build --bundles dmg -``` - -This should produce something like: - -- `src-tauri/target/release/bundle/dmg/OpenWork__aarch64.dmg` - -### 5) Verify “unsigned” state - -Unsigned here means: **not Developer ID signed / not notarized**. - -Quick checks: - -```bash -# mount the dmg read-only -hdiutil attach -nobrowse -readonly "src-tauri/target/release/bundle/dmg/.dmg" - -# verify signature details (expect ad-hoc or not notarized) -codesign -dv --verbose=4 "/Volumes//.app" - -# gatekeeper assessment (expect rejected) -spctl -a -vv "/Volumes//.app" || true - -# unmount -hdiutil detach "/Volumes/" -``` - -### 6) Tag + push - -```bash -git commit -am "Prepare vX.Y.Z release" -git tag -a vX.Y.Z -m "OpenWork vX.Y.Z" -git push -git push origin vX.Y.Z -``` - -### 7) Create / update GitHub Release - -```bash -gh release create vX.Y.Z \ - --title "OpenWork vX.Y.Z" \ - --notes "" - -gh release upload vX.Y.Z "src-tauri/target/release/bundle/dmg/.dmg" --clobber -``` - -## Local helper scripts - -- `bun .opencode/skill/release/first-call.ts` checks prerequisites and prints the current version. - -## Notes - -- If you later add signing/notarization, this skill should be updated to include that flow. 
diff --git a/.opencode/skill/release/client.ts b/.opencode/skill/release/client.ts deleted file mode 100644 index bbc206e104..0000000000 --- a/.opencode/skill/release/client.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { spawn } from "node:child_process"; - -export async function run( - command: string, - args: string[], - options?: { cwd?: string; allowFailure?: boolean }, -): Promise<{ ok: boolean; code: number; stdout: string; stderr: string }> { - const cwd = options?.cwd; - - const child = spawn(command, args, { - cwd, - stdio: ["ignore", "pipe", "pipe"], - env: process.env, - }); - - let stdout = ""; - let stderr = ""; - - child.stdout.setEncoding("utf8"); - child.stderr.setEncoding("utf8"); - - child.stdout.on("data", (d) => (stdout += d)); - child.stderr.on("data", (d) => (stderr += d)); - - const code = await new Promise((resolve) => { - child.on("close", (c) => resolve(c ?? -1)); - }); - - const ok = code === 0; - if (!ok && !options?.allowFailure) { - throw new Error( - `Command failed (${code}): ${command} ${args.join(" ")}\n${stderr || stdout}`, - ); - } - - return { ok, code, stdout, stderr }; -} diff --git a/.opencode/skill/release/first-call.ts b/.opencode/skill/release/first-call.ts deleted file mode 100644 index 26093d4c5a..0000000000 --- a/.opencode/skill/release/first-call.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { readFile } from "node:fs/promises"; -import { loadEnv } from "./load-env"; -import { run } from "./client"; - -async function main() { - await loadEnv(); - - await run("gh", ["auth", "status"], { allowFailure: false }); - - const pkgRaw = await readFile("package.json", "utf8"); - const pkg = JSON.parse(pkgRaw) as { name?: string; version?: string }; - - console.log( - JSON.stringify( - { - ok: true, - package: pkg.name ?? null, - version: pkg.version ?? 
null, - next: [ - "pnpm typecheck", - "pnpm tauri build --bundles dmg", - "gh release upload vX.Y.Z --clobber", - ], - }, - null, - 2, - ), - ); -} - -main().catch((e) => { - const message = e instanceof Error ? e.message : String(e); - console.error(message); - process.exit(1); -}); diff --git a/.opencode/skill/release/load-env.ts b/.opencode/skill/release/load-env.ts deleted file mode 100644 index e8fec9d65a..0000000000 --- a/.opencode/skill/release/load-env.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { run } from "./client"; - -const REQUIRED = [ - "pnpm", - "cargo", - "gh", - "hdiutil", - "codesign", - "spctl", -]; - -export async function loadEnv() { - const missing: string[] = []; - - for (const bin of REQUIRED) { - try { - await run("/usr/bin/env", ["bash", "-lc", `command -v ${bin}`], { allowFailure: false }); - } catch { - missing.push(bin); - } - } - - if (missing.length) { - throw new Error(`Missing required tools: ${missing.join(", ")}`); - } - - return { ok: true as const }; -} diff --git a/.opencode/skills/browser-setup-devtools/SKILL.md b/.opencode/skills/browser-setup-devtools/SKILL.md new file mode 100644 index 0000000000..4daca7d74a --- /dev/null +++ b/.opencode/skills/browser-setup-devtools/SKILL.md @@ -0,0 +1,37 @@ +--- +name: browser-setup-devtools +description: Guide users through browser automation setup using Chrome DevTools MCP only. Use when the user asks to set up browser automation, Chrome DevTools MCP, browser MCP, or runs the browser-setup command. +--- + +# Browser automation setup (Chrome DevTools MCP) + +## Principles + +- Keep prompts minimal; do as much as possible with tools and commands. +- Use Chrome DevTools MCP only. + +## Workflow + +1. Ask: "Do you have Chrome installed on this computer?" +2. If no or unsure: + - Offer to open the download page yourself and do it if possible. + - Provide a clickable link: https://www.google.com/chrome/ + - Continue after installation is confirmed. +3. 
Check DevTools MCP availability: + - Call `chrome-devtools_list_pages`. + - If pages exist, select one with `chrome-devtools_select_page`. + - If no pages, create one with `chrome-devtools_new_page` (use https://example.com) and then select it. +4. If DevTools MCP calls fail: + - Ask the user to open Chrome and keep it running. + - Retry `chrome-devtools_list_pages`. + - If it still fails, ensure `opencode.jsonc` includes `mcp["chrome-devtools"]` with command `['npx', '-y', 'chrome-devtools-mcp@latest']` and ask the user to restart OpenWork/OpenCode. + - Retry the DevTools MCP check. +5. If DevTools MCP is ready: + - Offer a first task ("Let's try opening a webpage"). + - If yes, use `chrome-devtools_navigate_page` or `chrome-devtools_new_page` to open the URL and confirm completion. + +## Response rules + +- Keep each user prompt to one short sentence when possible. +- Use direct offers like "I can open Chrome now" and follow with tool actions. +- Always present links as clickable URLs. diff --git a/.opencode/skills/cargo-lock-manager/.gitignore b/.opencode/skills/cargo-lock-manager/.gitignore new file mode 100644 index 0000000000..2334d82b84 --- /dev/null +++ b/.opencode/skills/cargo-lock-manager/.gitignore @@ -0,0 +1,2 @@ +.env +*.log diff --git a/.opencode/skills/cargo-lock-manager/SKILL.md b/.opencode/skills/cargo-lock-manager/SKILL.md new file mode 100644 index 0000000000..e7522f8d2d --- /dev/null +++ b/.opencode/skills/cargo-lock-manager/SKILL.md @@ -0,0 +1,67 @@ +--- +name: cargo-lock-manager +description: | + Manages Cargo.lock file updates and resolves --locked flag issues in CI/CD. 
+ + Triggers when user mentions: + - "cargo test --locked failed" + - "cannot update the lock file" + - "Cargo.lock is out of date" + - "PR failed with --locked error" + - "fix Cargo.lock" +--- + +## Quick Usage (Already Configured) + +### Check Cargo.lock status +```bash +cd packages/desktop/src-tauri +cargo check --locked 2>&1 | head -20 +``` + +### Update Cargo.lock locally +```bash +cd packages/desktop/src-tauri +cargo update --workspace +``` + +### Test with --locked after update +```bash +cd packages/desktop/src-tauri +cargo test --locked +``` + +## Common Gotchas + +- The `--locked` flag prevents automatic updates to Cargo.lock, which is good for reproducible builds but fails when dependencies change. +- PRs often fail because the lock file wasn't committed after dependency updates. +- Running `cargo update` without `--workspace` may not update all workspace members. + +## When CI Fails with --locked + +### Option 1: Update lock file and commit (Recommended) +```bash +cd packages/desktop/src-tauri +cargo update --workspace +git add Cargo.lock +git commit -m "chore: update Cargo.lock" +git push +``` + +### Option 2: Use --offline flag (for air-gapped environments) +```bash +cargo test --manifest-path packages/desktop/src-tauri/Cargo.toml --offline +``` + +## First-Time Setup (If Not Configured) + +No setup required. 
This skill assumes: +- Rust/Cargo is installed +- You're in the openwork repository +- The Tauri app is in `packages/desktop/src-tauri/` + +## Prevention Tips + +- Always run `cargo check` or `cargo build` after modifying `Cargo.toml` files +- Include `Cargo.lock` changes in the same commit as dependency updates +- Consider adding a pre-commit hook to verify lock file is up to date diff --git a/.opencode/skills/cargo-lock-manager/scripts/check-lock.sh b/.opencode/skills/cargo-lock-manager/scripts/check-lock.sh new file mode 100755 index 0000000000..4681a1edb1 --- /dev/null +++ b/.opencode/skills/cargo-lock-manager/scripts/check-lock.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# Check if Cargo.lock is up to date with --locked flag +# Exit 0 if OK, exit 1 if needs update + +set -e + +CARGO_TOML="${1:-apps/desktop/src-tauri/Cargo.toml}" + +echo "🔍 Checking Cargo.lock status for: $CARGO_TOML" + +if cargo check --manifest-path "$CARGO_TOML" --locked 2>&1; then + echo "✅ Cargo.lock is up to date" + exit 0 +else + echo "❌ Cargo.lock needs update" + echo "" + echo "To fix, run:" + echo " cd $(dirname "$CARGO_TOML") && cargo update --workspace" + exit 1 +fi diff --git a/.opencode/skills/cargo-lock-manager/scripts/update-lock.sh b/.opencode/skills/cargo-lock-manager/scripts/update-lock.sh new file mode 100755 index 0000000000..dd2bea69ba --- /dev/null +++ b/.opencode/skills/cargo-lock-manager/scripts/update-lock.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Update Cargo.lock for the Tauri workspace + +set -e + +CARGO_TOML="${1:-apps/desktop/src-tauri/Cargo.toml}" +WORKDIR=$(dirname "$CARGO_TOML") + +echo "📦 Updating Cargo.lock in: $WORKDIR" + +cd "$WORKDIR" +cargo update --workspace + +echo "" +echo "✅ Cargo.lock updated" +echo "📝 Don't forget to commit the changes!" 
diff --git a/.opencode/skills/get-started/SKILL.md b/.opencode/skills/get-started/SKILL.md new file mode 100644 index 0000000000..b8a4848ff2 --- /dev/null +++ b/.opencode/skills/get-started/SKILL.md @@ -0,0 +1,22 @@ +--- +name: get-started +description: Guide users through the get started setup and Chrome DevTools demo. +--- + +## When to use + +- Always load this skill when the user says "get started". + +## What to do + +- Reply with these four lines, exactly and in order: + + 1) hey there welcome this is openwork + 2) we've pre-configured you with a couple tools + 3) Get Started + 4) write "hey go on google.com" + +## Then + +- If the user writes "go on google.com" (or "hey go on google.com"), use the chrome-devtools MCP to open the site. +- After the navigation completes, reply: "I'm on <destination>" where <destination> is the final URL or page title they asked for. diff --git a/.opencode/skill/opencode-bridge/SKILL.md b/.opencode/skills/opencode-bridge/SKILL.md similarity index 100% rename from .opencode/skill/opencode-bridge/SKILL.md rename to .opencode/skills/opencode-bridge/SKILL.md diff --git a/.opencode/skill/opencode-mirror/SKILL.md b/.opencode/skills/opencode-mirror/SKILL.md similarity index 86% rename from .opencode/skill/opencode-mirror/SKILL.md rename to .opencode/skills/opencode-mirror/SKILL.md index 9d5c579864..cf3359934e 100644 --- a/.opencode/skill/opencode-mirror/SKILL.md +++ b/.opencode/skills/opencode-mirror/SKILL.md @@ -19,5 +19,5 @@ git -C vendor/opencode pull --ff-only ### Clone mirror ```bash -git clone https://github.com/opencode-ai/opencode vendor/opencode +git clone https://github.com/anomalyco/opencode vendor/opencode ``` diff --git a/.opencode/skills/opencode-primitives/SKILL.md b/.opencode/skills/opencode-primitives/SKILL.md new file mode 100644 index 0000000000..c35e478c4e --- /dev/null +++ b/.opencode/skills/opencode-primitives/SKILL.md @@ -0,0 +1,47 @@ +--- +name: opencode-primitives +description: Reference OpenCode docs when implementing skills, plugins, 
MCPs, or config-driven behavior. +--- + +## Purpose +Use this skill whenever OpenWork behavior is implemented directly on top of OpenCode primitives (skills, plugins, MCP servers, opencode.json config, tools/permissions). It anchors decisions to the official OpenCode documentation and keeps terminology consistent in the UI. + +## Doc Sources (Always cite when relevant) +- Skills: https://opencode.ai/docs/skills +- Plugins: https://opencode.ai/docs/plugins/ +- MCP servers: https://opencode.ai/docs/mcp-servers/ +- Config (opencode.json, locations, precedence): https://opencode.ai/docs/config/ + +## Key Facts To Apply +### Skills +- Skill files live in `.opencode/skills//SKILL.md` or global `~/.config/opencode/skills//SKILL.md`. +- Skills are discovered by walking up to the git worktree and loading any matching `skills/*/SKILL.md` in `.opencode/` or `.claude/skills/`. +- `SKILL.md` requires YAML frontmatter: `name` + `description`. +- Name rules: lowercase alphanumeric with single hyphens (`^[a-z0-9]+(-[a-z0-9]+)*$`), length 1-64, must match directory name. +- Description length: 1-1024 characters. +- Access is governed by `opencode.json` permissions (`permission.skill` allow/deny/ask). + +### Plugins +- Local plugins live in `.opencode/plugins/` (project) or `~/.config/opencode/plugins/` (global). +- npm plugins are listed in `opencode.json` under `plugin` and installed with Bun at startup. +- Load order: global config, project config, global plugins dir, project plugins dir. + +### MCP Servers +- MCP servers are defined in `opencode.json` under `mcp` with unique names. +- Local servers use `type: "local"` + `command` array; remote servers use `type: "remote"` + `url`. +- Servers can be enabled/disabled via `enabled`. +- MCP tools are managed via `tools` in config, including glob patterns. +- OAuth is handled automatically for remote servers; can be pre-registered or disabled. + +### Config (opencode.json) +- Supports JSON and JSONC. 
+- Precedence order: remote `.well-known/opencode` -> global `~/.config/opencode/opencode.json` -> custom path -> project `opencode.json` -> `.opencode/` directories -> inline env overrides. +- `.opencode` subdirectories are plural by default (`agents/`, `commands/`, `plugins/`, `skills/`, `tools/`, `themes/`), with singular names supported for compatibility. + +## When to Invoke +- Adding or adjusting OpenWork flows that reference skills, plugins, MCP servers, or OpenCode config. +- Designing onboarding guidance that mentions skill/plugin installation, config locations, or permission prompts. +- Implementing UIs that surface OpenCode primitives (skills tab, plugin manager, MCP toggles). + +## Usage +Call `skill({ name: "opencode-primitives" })` before implementing or documenting any OpenWork behavior that maps to OpenCode primitives. diff --git a/.opencode/skill/openwork-core/SKILL.md b/.opencode/skills/openwork-core/SKILL.md similarity index 86% rename from .opencode/skill/openwork-core/SKILL.md rename to .opencode/skills/openwork-core/SKILL.md index cd92a4660e..ac668d2fde 100644 --- a/.opencode/skill/openwork-core/SKILL.md +++ b/.opencode/skills/openwork-core/SKILL.md @@ -6,7 +6,7 @@ description: Core context and guardrails for OpenWork native app ## Quick Usage (Already Configured) ### Orientation -- Read `AGENTS.md` and `design-prd.md` before changing behavior. +- Read `AGENTS.md`, `VISION.md`, `PRINCIPLES.md`, `PRODUCT.md`, and `ARCHITECTURE.md` before changing behavior. - Ensure `vendor/opencode` exists for self-reference. - Use the `tauri-solidjs` skill for stack-specific guidance. 
@@ -20,6 +20,9 @@ git -C vendor/opencode pull --ff-only pnpm tauri dev # Desktop development pnpm tauri ios dev # iOS development pnpm tauri android dev # Android development + +# Or run directly in the desktop package: +pnpm -C packages/desktop tauri dev ``` ## OpenCode Integration @@ -58,7 +61,7 @@ opencode -p "your prompt" -f json -q ### Clone the OpenCode mirror ```bash -git clone https://github.com/opencode-ai/opencode vendor/opencode +git clone https://github.com/anomalyco/opencode vendor/opencode ``` ### Initialize Tauri project @@ -81,5 +84,5 @@ pnpm tauri android init ### Clone the OpenCode mirror ```bash -git clone https://github.com/opencode-ai/opencode vendor/opencode +git clone https://github.com/anomalyco/opencode vendor/opencode ``` diff --git a/.opencode/skills/openwork-debug/SKILL.md b/.opencode/skills/openwork-debug/SKILL.md new file mode 100644 index 0000000000..f0615605e2 --- /dev/null +++ b/.opencode/skills/openwork-debug/SKILL.md @@ -0,0 +1,59 @@ +--- +name: openwork-debug +description: Debug OpenWork sidecars, config, and audit trail +--- + +## Credential check + +Set these before running the HTTP checks: + +- `OPENWORK_SERVER_URL` +- `OPENWORK_SERVER_TOKEN` +- `OPENWORK_WORKSPACE_ID` (optional; use `/workspaces` to discover) + +## Quick usage (read-only) + +```bash +curl -s "$OPENWORK_SERVER_URL/health" +curl -s "$OPENWORK_SERVER_URL/capabilities" \ + -H "Authorization: Bearer $OPENWORK_SERVER_TOKEN" + +curl -s "$OPENWORK_SERVER_URL/workspaces" \ + -H "Authorization: Bearer $OPENWORK_SERVER_TOKEN" +``` + +## Workspace config snapshot + +```bash +curl -s "$OPENWORK_SERVER_URL/workspace/$OPENWORK_WORKSPACE_ID/config" \ + -H "Authorization: Bearer $OPENWORK_SERVER_TOKEN" +``` + +## Audit log (recent) + +```bash +curl -s "$OPENWORK_SERVER_URL/workspace/$OPENWORK_WORKSPACE_ID/audit?limit=25" \ + -H "Authorization: Bearer $OPENWORK_SERVER_TOKEN" +``` + +## OpenCode engine checks + +```bash +opencode -p "ping" -f json -q +opencode mcp list 
+opencode mcp debug +``` + +## DB fallback (read-only) + +When the engine API is unavailable, you can inspect the SQLite db: + +```bash +sqlite3 ~/.opencode/opencode.db "select id, title, status from sessions order by updated_at desc limit 5;" +sqlite3 ~/.opencode/opencode.db "select role, content from messages order by created_at desc limit 10;" +``` + +## Notes + +- Audit logs are stored at `.opencode/openwork/audit.jsonl` in the workspace root. +- OpenWork server writes only within approved workspace roots. diff --git a/.opencode/skills/openwork-orchestrator-npm-publish/.env.example b/.opencode/skills/openwork-orchestrator-npm-publish/.env.example new file mode 100644 index 0000000000..8c3e04f3cb --- /dev/null +++ b/.opencode/skills/openwork-orchestrator-npm-publish/.env.example @@ -0,0 +1 @@ +NPM_TOKEN= diff --git a/.opencode/skills/openwork-orchestrator-npm-publish/.gitignore b/.opencode/skills/openwork-orchestrator-npm-publish/.gitignore new file mode 100644 index 0000000000..4c49bd78f1 --- /dev/null +++ b/.opencode/skills/openwork-orchestrator-npm-publish/.gitignore @@ -0,0 +1 @@ +.env diff --git a/.opencode/skills/openwork-orchestrator-npm-publish/SKILL.md b/.opencode/skills/openwork-orchestrator-npm-publish/SKILL.md new file mode 100644 index 0000000000..fb3e5cbedf --- /dev/null +++ b/.opencode/skills/openwork-orchestrator-npm-publish/SKILL.md @@ -0,0 +1,83 @@ +--- +name: openwork-orchestrator-npm-publish +description: | + Publish the openwork-orchestrator npm package with clean git hygiene. + + Triggers when user mentions: + - "openwork-orchestrator npm publish" + - "publish openwork-orchestrator" + - "bump openwork-orchestrator" +--- + +## Quick usage (already configured) + +1. Ensure you are on the default branch and the tree is clean. +2. Bump versions via the shared release bump (this keeps `openwork-orchestrator` aligned with the app/desktop release). 
+ +```bash +pnpm bump:patch +# or: pnpm bump:minor +# or: pnpm bump:major +# or: pnpm bump:set -- X.Y.Z +``` + +3. Commit the bump. +4. Preferred: publish via the "Release App" GitHub Actions workflow by tagging `vX.Y.Z`. + +Manual recovery path (sidecars + npm) below. + +```bash +pnpm --filter openwork-orchestrator build:sidecars +gh release create openwork-orchestrator-vX.Y.Z packages/orchestrator/dist/sidecars/* \ + --repo different-ai/openwork \ + --title "openwork-orchestrator vX.Y.Z sidecars" \ + --notes "Sidecar binaries and manifest for openwork-orchestrator vX.Y.Z" +``` + +5. Build openwork-orchestrator binaries for all supported platforms. + +```bash +pnpm --filter openwork-orchestrator build:bin:all +``` + +6. Publish `openwork-orchestrator` as a meta package + platform packages (optionalDependencies). + +```bash +node packages/orchestrator/scripts/publish-npm.mjs +``` + +7. Verify the published version. + +```bash +npm view openwork-orchestrator version +``` + +--- + +## Scripted publish + +```bash +./.opencode/skills/openwork-orchestrator-npm-publish/scripts/publish-openwork-orchestrator.sh +``` + +--- + +## First-time setup (if not configured) + +Authenticate with npm before publishing. + +```bash +npm login +``` + +Alternatively, export an npm token in your environment (see `.env.example`). + +--- + +## Notes + +- `openwork-orchestrator` is published as: + - `openwork-orchestrator` (wrapper + optionalDependencies) + - `openwork-orchestrator-darwin-arm64`, `openwork-orchestrator-darwin-x64`, `openwork-orchestrator-linux-arm64`, `openwork-orchestrator-linux-x64`, `openwork-orchestrator-windows-x64` (platform binaries) +- `openwork-orchestrator` is versioned in lockstep with OpenWork app/desktop releases. +- openwork-orchestrator downloads sidecars from `openwork-orchestrator-vX.Y.Z` release assets by default. 
diff --git a/.opencode/skills/openwork-orchestrator-npm-publish/scripts/publish-openwork-orchestrator.sh b/.opencode/skills/openwork-orchestrator-npm-publish/scripts/publish-openwork-orchestrator.sh new file mode 100755 index 0000000000..dc6a6749e5 --- /dev/null +++ b/.opencode/skills/openwork-orchestrator-npm-publish/scripts/publish-openwork-orchestrator.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +set -euo pipefail + +root="$(git rev-parse --show-toplevel)" +cd "$root" + +if [ -n "$(git status --porcelain)" ]; then + echo "Working tree is dirty. Commit or stash before publish." + exit 1 +fi + +version=$(node -p "require('./apps/orchestrator/package.json').version") +echo "Publishing openwork-orchestrator@$version" + +pnpm --filter openwork-orchestrator publish --access public diff --git a/.opencode/skills/release/SKILL.md b/.opencode/skills/release/SKILL.md new file mode 100644 index 0000000000..97bd034bf0 --- /dev/null +++ b/.opencode/skills/release/SKILL.md @@ -0,0 +1,54 @@ +--- +title: Release flow +description: Step through versioning, tagging, and verification +name: release +--- + +## Prepare +Confirm the repo is on `main` and clean. Keep changes aligned with OpenCode primitives like `.opencode`, `opencode.json`, skills, and plugins when relevant. + +--- + +## Bump +Update versions in `packages/app/package.json`, `packages/desktop/package.json`, `packages/orchestrator/package.json` (publishes as `openwork-orchestrator`), `packages/desktop/src-tauri/tauri.conf.json`, and `packages/desktop/src-tauri/Cargo.toml`. Use one of these commands. + +```bash +pnpm bump:patch +pnpm bump:minor +pnpm bump:major +pnpm bump:set -- 0.1.21 +``` + +--- + +## Merge +Merge the version bump into `main`. Make sure no secrets or credentials are committed. + +--- + +## Tag +Create and push the tag to trigger the Release App workflow. + +```bash +git tag vX.Y.Z +git push origin vX.Y.Z +``` + +--- + +## Rerun +If a tag needs a rerun, dispatch the workflow. 
+ +```bash +gh workflow run "Release App" --repo different-ai/openwork -f tag=vX.Y.Z +``` + +--- + +## Verify +Confirm the run and the published release. + +```bash +gh run list --repo different-ai/openwork --workflow "Release App" --limit 5 +gh release view vX.Y.Z --repo different-ai/openwork +``` diff --git a/.opencode/skills/solidjs-patterns/SKILL.md b/.opencode/skills/solidjs-patterns/SKILL.md new file mode 100644 index 0000000000..949a953ded --- /dev/null +++ b/.opencode/skills/solidjs-patterns/SKILL.md @@ -0,0 +1,93 @@ +--- +name: solidjs-patterns +description: SolidJS reactivity + UI state patterns for OpenWork +--- + +## Why this skill exists + +OpenWork’s UI is SolidJS: it updates via **signals**, not React-style rerenders. +Most “UI stuck” bugs are actually **state coupling** bugs (e.g. one global `busy()` disabling an unrelated action), not rerender issues. + +This skill captures the patterns we want to consistently use in OpenWork. + +## Core rules + +- Prefer **fine-grained signals** over shared global flags. +- Keep async actions **scoped** (each action gets its own `pending` state). +- Derive UI state via `createMemo()` instead of duplicating booleans. +- Avoid mutating arrays/objects stored in signals; always create new values. + +## Scoped async actions (recommended) + +When an operation can overlap with others (permissions, installs, background refresh), don’t reuse a global `busy()`. 
+ +Use a dedicated signal per action: + +```ts +const [replying, setReplying] = createSignal(false); + +async function respond() { + if (replying()) return; + setReplying(true); + try { + await doTheThing(); + } finally { + setReplying(false); + } +} +``` + +### Why + +A single `busy()` boolean creates deadlocks: + +- Long-running task sets `busy(true)` +- A permission prompt appears and its buttons are disabled by `busy()` +- The task can’t continue until permission is answered +- The user can’t answer because buttons are disabled + +Fix: permission UI must be disabled only by a **permission-specific** pending state. + +## Signal snapshots in async handlers + +If you read signals inside an async function and you need stable values, snapshot early: + +```ts +const request = activePermission(); +if (!request) return; +const requestID = request.id; + +await respondPermission(requestID, "always"); +``` + +## Derived UI state + +Prefer `createMemo()` for computed disabled states: + +```ts +const canSend = createMemo(() => prompt().trim().length > 0 && !busy()); +``` + +## Lists + +- Use setter callbacks for derived updates: + +```ts +setItems((current) => current.filter((x) => x.id !== id)); +``` + +- Don’t mutate `current` in-place. + +## Practical checklist (SolidJS UI changes) + +- Does any button depend on a global flag that could be true during long-running work? +- Could two async actions overlap and fight over one boolean? +- Is any UI state duplicated (can be derived instead)? +- Do event handlers read signals after an `await` where values might have changed? +- If you refactor props/types, did you update all intermediate component signatures and call sites? 
+ +## References + +- SolidJS: https://www.solidjs.com/docs/latest +- SolidJS signals: https://www.solidjs.com/docs/latest/api#createsignal +- SolidJS memos: https://www.solidjs.com/docs/latest/api#creatememo diff --git a/.opencode/skill/tauri-solidjs/SKILL.md b/.opencode/skills/tauri-solidjs/SKILL.md similarity index 78% rename from .opencode/skill/tauri-solidjs/SKILL.md rename to .opencode/skills/tauri-solidjs/SKILL.md index f3c34e4372..c6a5c469f3 100644 --- a/.opencode/skill/tauri-solidjs/SKILL.md +++ b/.opencode/skills/tauri-solidjs/SKILL.md @@ -34,25 +34,28 @@ pnpm tauri android build ## Project Structure ``` -apps/openwork/ - src-tauri/ - src/ - main.rs # Rust entry point - lib.rs # Tauri commands and state - Cargo.toml # Rust dependencies - tauri.conf.json # Tauri configuration - capabilities/ # Permission capabilities - src/ - App.tsx # SolidJS root component - index.tsx # Entry point - components/ # UI components - stores/ # Solid stores for state - lib/ # Utilities and OpenCode bridge - index.html # HTML template - package.json # Frontend dependencies - vite.config.ts # Vite configuration +openwork/ + packages/ + desktop/ + src-tauri/ + src/ + main.rs # Rust entry point + lib.rs # Tauri commands and state + Cargo.toml # Rust dependencies + tauri.conf.json # Tauri configuration + capabilities/ # Permission capabilities + src/ + App.tsx # SolidJS root component + index.tsx # Entry point + components/ # UI components + stores/ # Solid stores for state + lib/ # Utilities and OpenCode bridge + index.html # HTML template + package.json # Frontend dependencies + vite.config.ts # Vite configuration ``` + ## Key Dependencies ### Frontend (package.json) @@ -88,7 +91,7 @@ serde_json = "1" ## Tauri Commands (Rust -> JS) ```rust -// src-tauri/src/lib.rs +// packages/desktop/src-tauri/src/lib.rs use tauri::Manager; #[tauri::command] diff --git a/.vercelignore b/.vercelignore new file mode 100644 index 0000000000..8de21fa1de --- /dev/null +++ b/.vercelignore @@ -0,0 
+1,15 @@ +_archive +_worktrees +.git +.opencode +vendor +**/node_modules +**/.next +**/dist +**/.*.bun-build +packages/agent-lab +packages/desktop +packages/headless +packages/opencode-router +packages/owpenbot +packages/server diff --git a/AGENTS.md b/AGENTS.md index 85537a2006..9958c15e2b 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,86 +1,273 @@ # AGENTS.md -OpenWork exists to bring OpenCode's agentic power to non-technical people through an accessible, transparent **native GUI**. It is an open-source competitor to Anthropic's Cowork and must stay faithful to OpenCode's principles: self-building, self-referential, standards-first, and graceful degradation. +OpenWork helps users run agents, skills, and MCP. It is an open-source alternative to Claude Cowork/Codex as a desktop app. + +## What OpenWork Is + +OpenWork is a practical control surface for agentic work: + +* Run local and remote agent workflows from one place. +* Use OpenCode capabilities directly through OpenWork. +* Compose desktop app, server, and messaging connectors without lock-in. +* Treat the OpenWork app as a client of the OpenWork server API surface. +* Connect to hosted workers through a simple user flow: `Add a worker` -> `Connect remote`. + +## Core Philosophy + +* **Local-first, cloud-ready**: OpenWork runs on your machine in one click and can connect to cloud workflows when needed. +* **Server-consumption first**: the app should consume OpenWork server surfaces (self-hosted or hosted), not invent parallel behavior. +* **Composable**: use the desktop app, WhatsApp/Slack/Telegram connectors, or server mode based on the task. +* **Ejectable**: OpenWork is powered by OpenCode, so anything OpenCode can do is available in OpenWork, even before a dedicated UI exists. +* **Sharing is caring**: start solo, then share quickly; one CLI or desktop command can spin up an instantly shareable instance. 
+ +## Core Runtime Model (Updated) + +OpenWork now has three production-grade ways to run the same product surface: + +1. **Desktop-hosted app/server** + - OpenWork app runs locally and can host server functionality on-device. +2. **CLI-hosted server (openwork-orchestrator)** + - OpenWork server surfaces can be provided by the orchestrator/CLI on a trusted machine. +3. **Hosted OpenWork Cloud server** + - OpenWork-hosted infrastructure provisions workers and exposes the same remote-connect semantics. + +User mental model: + +* The app is the UI and control layer. +* The server is the execution/control API layer. +* A worker is a remote runtime destination. +* Connecting to a worker happens through `Add worker` -> `Connect remote` using URL + token (or deep link). + +Read `ARCHITECTURE.md` for runtime flow, server-vs-shell ownership, and architecture behavior. Read `INFRASTRUCTURE.md` for deployment and control-plane details. ## Why OpenWork Exists -1. **OpenCode is powerful but terminal-only.** Non-technical users can't access it. -2. **Cowork is closed-source and locked to Claude Max.** We need an open alternative. -3. **Mobile-first matters.** People want to run tasks from their phones. -4. **Slick UI is non-negotiable.** The experience must feel premium, not utilitarian. +**Cowork is closed-source and locked to Claude Max.** We need an open alternative. +**Mobile-first matters.** People want to run tasks from their phones, including via messaging surfaces like WhatsApp and Telegram through OpenCode Router. +**Slick UI is non-negotiable.** The experience must feel premium, not utilitarian. + +## Agent Guidelines for development + +* **Purpose-first UI**: prioritize clarity, safety, and approachability for non-technical users. +* **Parity with OpenCode**: anything the UI can do must map cleanly to OpenCode tools. 
+* **Prefer OpenCode primitives**: represent concepts using OpenCode's native surfaces first (folders/projects, `.opencode`, `opencode.json`, skills, plugins) before introducing new abstractions. +* **Web parity**: anything that mutates `.opencode/` should be expressible via the OpenWork server API; Tauri-only filesystem calls are a fallback for host mode, not a separate capability set. +* **Self-referential**: maintain a gitignored mirror of OpenCode at `vendor/opencode` for inspection. +* **Self-building**: prefer prompts, skills, and composable primitives over bespoke logic. +* **Open source**: keep the repo portable; no secrets committed. +* **Slick and fluid**: 60fps animations, micro-interactions, premium feel. +* **Mobile-native**: touch targets, gestures, and layouts optimized for small screens. +* **Provider-neutral control**: expose app actions through OpenWork-owned control surfaces first; provider-specific controllers should drive those surfaces rather than hardwiring provider logic into the app UI. + +## Task Intake (Required) + +Before making changes, explicitly confirm the target repository in your first task update. + +Required format: + +1. `Target repo: ` (for example: `_repos/openwork`) +2. `Out of scope repos: ` (for example: `_repos/opencode`) +3. `Planned output: ` + +If the user request references multiple repos and the intended edit location is ambiguous, stop after discovery and ask for a single repo target before editing files. + +## New Feature Workflow (Required) + +When the user asks to create a new feature, follow this exact procedure: + +1. Make sure you are up to date on all submodules and repos synced to the head of remotes. +2. Create a worktree. +3. Implement the feature. +4. Start the narrowest supported product stack for the flow under test. +5. Use Chrome MCP to fully test the feature with the relevant flow-specific skill. +6. Take screenshots and put them in the repo. +7. 
Refer to these screenshots in the PR (only if relevant in the UI). +8. Always test the flow you just implemented. + +If you cannot complete steps 4-8 (Docker, Chrome MCP, missing credentials, or environment limitations), you must say so explicitly and include: + +* which steps you could not run and why +* what you verified instead (tests, logs, manual checks) +* the exact commands/steps the user should run to complete the end-to-end gate + +## Pull Request Expectations (Fast Merge) + +If you open a PR, you must run tests and report what you ran (commands + result). + +To maximize merge speed, include evidence of the end-to-end flow: + +* Ideally: attach a short video/screen recording showing the flow running successfully. +* Otherwise: screenshots are acceptable, but video is preferred. + +If you cannot run tests or capture the video, say so explicitly and explain why, and include the exact commands/steps for the reviewer to reproduce. + +## Living Systems -## Core Expectations +OpenWork aims to be a **living system**: agents, skills, commands, and config are hot-reloadable while sessions are running. This enables agents to create new skills or update their own configuration and have changes take effect immediately, without tearing down active sessions. -- **Purpose-first UI**: prioritize clarity, safety, and approachability for non-technical users. -- **Parity with OpenCode**: anything the UI can do must map cleanly to OpenCode tools. -- **Self-referential**: maintain a gitignored mirror of OpenCode at `vendor/opencode` for inspection. -- **Self-building**: prefer prompts, skills, and composable primitives over bespoke logic. -- **Open source**: keep the repo portable; no secrets committed. -- **Slick and fluid**: 60fps animations, micro-interactions, premium feel. -- **Mobile-native**: touch targets, gestures, and layouts optimized for small screens. 
+Design principles for hot reload: + +* **Conservative triggers**: only reload when a file that OpenCode reads at startup actually changes inside `.opencode/` or `opencode.json`. Ignore metadata files like `openwork.json`, `.DS_Store`, etc. +* **Workspace-scoped**: reload state is keyed per workspace. Switching workspaces never leaks reload signals from one workspace to another. +* **Session-aware**: when sessions are actively running, queue reload signals. Promote to visible reload (toast or auto-reload) only after all active sessions finish. This avoids interrupting in-flight tool calls. +* **Auto-reload setting**: each workspace can opt into automatic reload via `.opencode/openwork.json` (`reload.auto`). When enabled, the engine reloads automatically once queued signals are ready and no sessions are active. +* **Session continuity**: before reload, capture running session IDs, agents, and models. After reload, optionally relaunch those sessions so the user experiences seamless continuity. +* **Per-workspace isolation**: the desktop file watcher only watches the runtime-connected workspace root and its `.opencode/` directory. This can differ briefly from the UI-selected workspace while the user browses another workspace. The server reload event store is already keyed by `workspaceId`. ## Technology Stack -| Layer | Technology | -|-------|------------| -| Desktop/Mobile shell | Tauri 2.x | -| Frontend | SolidJS + TailwindCSS | -| State | Solid stores + IndexedDB | -| IPC | Tauri commands + events | +| Layer | Technology | +| -------------------- | ------------------------- | +| Desktop/Mobile shell | Tauri 2.x with Electron migration path | +| Frontend | SolidJS + TailwindCSS | +| State | Solid stores + IndexedDB | +| IPC | Tauri commands + events | | OpenCode integration | Spawn CLI or embed binary | ## Repository Guidance -- Always read `design-prd.md` at session start for product intent and user flows. 
-- Keep `design-prd.md` and `.opencode/skill/*/SKILL.md` updated when behavior changes. -- Use `.opencode/skill/` for repeatable workflows and domain vocabulary. +* Use `VISION.md`, `PRINCIPLES.md`, `PRODUCT.md`, `ARCHITECTURE.md`, and `INFRASTRUCTURE.md` to understand the "why" and requirements so you can guide your decisions. +* Treat `ARCHITECTURE.md` as the authoritative system design source for runtime flow, server ownership, filesystem mutation policy, and agent/runtime boundaries. If those behaviors change, update `ARCHITECTURE.md` in the same task. +* Use `DESIGN-LANGUAGE.md` as the default visual reference for OpenWork app and landing work. +* For OpenWork session-surface details, also reference `packages/docs/orbita-layout-style.mdx`. + +## App Architecture (CUPID) + +For `apps/app/src/app/**`, use CUPID: small public surfaces, intention-revealing names, minimal dependencies, predictable ownership, and domain-based structure. + +* Organize app code by product domain and app behavior, not generic buckets like `pages`, `hooks`, `utils`, or app-wide props. +* Prefer a thin shell, domain modules, and tiny shared primitives. +* Colocate state, UI, helpers, and server/client adapters with the domain that owns the workflow. +* Treat shared utilities as a last resort; promote only after multiple real consumers exist. +* Cross-domain imports should go through a small public API, not another domain's internals. +* Keep global shell code thin and use it for routing, top-level layout, runtime wiring, and shared reload/update surfaces only. +* Domain map: shell, workspace, session, connections, cloud, app-settings, and kernel. +* When changing app architecture, moving ownership, or editing hot spots like `app.tsx`, `pages/dashboard.tsx`, `pages/session.tsx`, or `pages/settings.tsx`, consult the workspace-root skill at `../../.opencode/skills/cupid-app-architecture/SKILL.md` first. 
+ +## Dev Debugging + +* If you change `apps/server/src`, rebuild the OpenWork server binary (`pnpm --filter openwork-server build:bin`) because `openwork` (openwork-orchestrator) runs the compiled server, not the TS sources. ## Local Structure ``` -apps/openwork/ - AGENTS.md # This file - design-prd.md # Exhaustive PRD and user flow map - .gitignore # Ignores vendor/opencode, node_modules, etc. +openwork/ + AGENTS.md # This file + VISION.md # Product vision and positioning + PRINCIPLES.md # Decision framework and guardrails + PRODUCT.md # Requirements, UX, and user flows + ARCHITECTURE.md # Runtime modes and OpenCode integration + .gitignore # Ignores vendor/opencode, node_modules, etc. .opencode/ - skill/ # Skills for product workflows - vendor/ - opencode/ # Gitignored OpenCode mirror for self-inspection - src-tauri/ # Rust backend (Tauri) - src/ # SolidJS frontend - package.json # Frontend dependencies - Cargo.toml # Rust dependencies + apps/ + app/ + src/ + public/ + pr/ + prd/ + package.json + desktop/ + src-tauri/ + package.json + server/ + src/ + package.json ``` ## OpenCode SDK Usage OpenWork integrates with OpenCode via: -1. **Non-interactive mode**: `opencode -p "prompt" -f json -q` -2. **Database access**: Read `.opencode/opencode.db` for sessions and messages. -3. **MCP bridge**: OpenWork as an MCP server for real-time permissions and streaming. +1. **Non-interactive mode**: `opencode -p "prompt" -f json -q` +2. **Database access**: Read `.opencode/opencode.db` for sessions and messages. 
Key primitives to expose: -- `session.Service` — Task runs, history -- `message.Service` — Chat bubbles, tool calls -- `agent.Service` — Task execution, progress -- `permission.Service` — Permission prompts -- `tools.BaseTool` — Step-level actions + +* `session.Service` — Task runs, history +* `message.Service` — Chat bubbles, tool calls +* `agent.Service` — Task execution, progress +* `permission.Service` — Permission prompts +* `tools.BaseTool` — Step-level actions ## Safety + Accessibility -- Default to least-privilege permissions and explicit user approvals. -- Provide transparent status, progress, and reasoning at every step. -- Use progressive disclosure for advanced controls. -- WCAG 2.1 AA compliance. -- Screen reader labels for all interactive elements. +* Default to least-privilege permissions and explicit user approvals. +* Provide transparent status, progress, and reasoning at every step. +* WCAG 2.1 AA compliance. +* Screen reader labels for all interactive elements. ## Performance Targets -| Metric | Target | -|--------|--------| -| First contentful paint | <500ms | -| Time to interactive | <1s | -| Animation frame rate | 60fps | -| Interaction latency | <100ms | -| Bundle size (JS) | <200KB gzipped | +| Metric | Target | +| ---------------------- | -------------- | +| First contentful paint | <500ms | +| Time to interactive | <1s | +| Animation frame rate | 60fps | +| Interaction latency | <100ms | +| Bundle size (JS) | <200KB gzipped | + +## Skill: SolidJS Patterns + +When editing SolidJS UI (`apps/app/src/**/*.tsx`), consult: + +* `.opencode/skills/solidjs-patterns/SKILL.md` + +This captures OpenWork’s preferred reactivity + UI state patterns (avoid global `busy()` deadlocks; use scoped async state). + +## Skill: Trigger a Release + +OpenWork releases are built by GitHub Actions (`Release App`). A release is triggered by pushing a `v*` tag (e.g. `v0.1.6`). 
+`Release App` can also publish openwork-orchestrator sidecars and npm packages when enabled via workflow inputs or repo vars (`RELEASE_PUBLISH_SIDECARS`, `RELEASE_PUBLISH_NPM`). + +### Standard release (recommended) + +1. Ensure `main` is green and up to date. +2. Bump versions (keep these in sync): + +* `apps/app/package.json` (`version`) +* `apps/desktop/package.json` (`version`) +* `apps/orchestrator/package.json` (`version`, publishes as `openwork-orchestrator`) +* `apps/desktop/src-tauri/tauri.conf.json` (`version`) +* `apps/desktop/src-tauri/Cargo.toml` (`version`) + +You can bump all three non-interactively with: + +* `pnpm bump:patch` +* `pnpm bump:minor` +* `pnpm bump:major` +* `pnpm bump:set -- 0.1.21` + +3. Merge the version bump to `main`. +4. Create and push a tag: + * `git tag vX.Y.Z` + * `git push origin vX.Y.Z` + +This triggers the workflow automatically (`on: push.tags: v*`). + +### Re-run / repair an existing release + +If the workflow needs to be re-run for an existing tag (e.g. notarization retry), use workflow dispatch: + +* `gh workflow run "Release App" --repo different-ai/openwork -f tag=vX.Y.Z` + +### Verify + +* Runs: `gh run list --repo different-ai/openwork --workflow "Release App" --limit 5` +* Release: `gh release view vX.Y.Z --repo different-ai/openwork` + +Confirm the DMG assets are attached and versioned correctly. + +## Skill: Publish openwork-orchestrator (npm) + +This is usually covered by `Release App` when `publish_sidecars` + `publish_npm` are enabled. Use `.opencode/skills/openwork-orchestrator-npm-publish/SKILL.md` for manual recovery or one-off publishing. + +1. Ensure the default branch is up to date and clean. +2. Bump `apps/orchestrator/package.json` (`version`). +3. Commit the bump. +4. Build and upload sidecar assets for the same version tag: + * `pnpm --filter openwork-orchestrator build:sidecars` + * `gh release create openwork-orchestrator-vX.Y.Z apps/orchestrator/dist/sidecars/* --repo different-ai/openwork` +5. 
Publish: + * `pnpm --filter openwork-orchestrator publish --access public` +6. Verify: + * `npm view openwork-orchestrator version` diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 0000000000..ab75b712c0 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,551 @@ +# OpenWork Architecture + +## Design principle: Predictable > Clever + +OpenWork optimizes for **predictability** over "clever" auto-detection. Users should be able to form a correct mental model of what will happen. + +Guidelines: + +- Prefer **explicit configuration** (a single setting or env var) over heuristics. +- Auto-detection is acceptable as a convenience, but must be: + - explainable (we can tell the user what we tried) + - overrideable (one obvious escape hatch) + - safe (no surprising side effects) +- When a prerequisite is missing, surface the **exact failing check** and a concrete next step. + +### Example: Docker-backed sandboxes (desktop) + +When enabling Docker-backed sandbox mode, prefer an explicit, single-path override for the Docker client binary: + +- `OPENWORK_DOCKER_BIN` (absolute path to `docker`) + +This keeps behavior predictable across environments where GUI apps do not inherit shell PATH (common on macOS). + +Auto-detection can exist as a convenience, but should be tiered and explainable: + +1. Honor `OPENWORK_DOCKER_BIN` if set. +2. Try the process PATH. +3. On macOS, try the login PATH from `/usr/libexec/path_helper`. +4. Last-resort: try well-known locations (Homebrew, Docker Desktop bundle) and validate the binary exists. + +The readiness check should be a clear, single command (e.g. `docker info`) and the UI should show the exact error output when it fails. + +## Minimal use of Tauri +We move most of the functionality to the openwork server which interfaces mostly with FS and proxies to opencode. + + + +## Filesystem mutation policy + +OpenWork should route filesystem mutations through the OpenWork server whenever possible. 
+ +Why: + +- the server is the one place that can apply the same behavior for both local and remote workspaces +- server-routed writes keep permission checks, approvals, audit trails, and reload events consistent +- Tauri-only filesystem mutations only work in desktop host mode and break parity with remote execution + +Guidelines: + +- Any UI feature that changes workspace files or config should call an OpenWork server endpoint first. +- Local Tauri filesystem commands are a host-mode fallback, not the primary product surface. +- If a feature cannot yet write through the OpenWork server, treat that as an architecture gap and close it before depending on direct local writes. +- Reads can fall back locally when necessary, but writes should be designed around the OpenWork server path. + +## Agent authority map + +When OpenWork is edited from `openwork-enterprise`, architecture and runtime behavior should be sourced from this document. + +| Entry point | Role | Architecture authority | +| --- | --- | --- | +| `openwork-enterprise/AGENTS.md` | OpenWork Factory multi-repo orchestration | Defers OpenWork runtime flow, server-vs-shell ownership, and filesystem mutation behavior to `_repos/openwork/ARCHITECTURE.md`. | +| `openwork-enterprise/.opencode/agents/openwork-surgeon.md` | Surgical fix agent for `_repos/openwork` | Uses `_repos/openwork/ARCHITECTURE.md` as the runtime and architecture source of truth before changing product behavior. | +| `_repos/openwork/AGENTS.md` | Product vocabulary, audience, and repo-local development guidance | Refers to `ARCHITECTURE.md` for runtime flow, server ownership, and architectural boundaries. | +| Skills / commands / agents that mutate workspace state | Capability layer on top of the product runtime | Should assume the OpenWork server path is canonical for workspace creation, config writes, `.opencode/` mutation, and reload signaling. 
| + +### Agent access to server-owned behavior + +Agents, skills, and commands should model the following as OpenWork server behavior first: + +- workspace creation and initialization +- writes to `.opencode/`, `opencode.json`, and `opencode.jsonc` +- OpenWork workspace config writes (`.opencode/openwork.json`) +- share-bundle publish/fetch flows for supported OpenWork capability bundles such as skills +- reload event generation after config or capability changes +- other filesystem-backed capability changes that must work across desktop host mode and remote clients + +Tauri or other native shell behavior remains the fallback or shell boundary for: + +- file and folder picking +- reveal/open-in-OS affordances +- updater and window management +- host-side process supervision and native runtime bootstrapping + +If an agent needs one of the server-owned behaviors above and only a Tauri path exists, treat that as an architecture gap to close rather than a parallel capability surface to preserve. + +## Release channels + +OpenWork desktop ships through two release channels: + +- **Stable** (default, all platforms): versioned builds produced by the `Release App` workflow. Each tag `vX.Y.Z` publishes signed, notarized Tauri bundles plus a `latest.json` updater manifest at `https://github.com/different-ai/openwork/releases/latest/download/latest.json`; when Electron publishing is enabled, the same release also carries signed, notarized Electron macOS assets plus `latest-mac.yml` at `https://github.com/different-ai/openwork/releases/latest/download/latest-mac.yml`. +- **Alpha** (macOS arm64 only, rolling): every merge to `dev` publishes signed, notarized Tauri and Electron builds to the rolling GitHub release tagged `alpha-macos-latest`. 
The Tauri alpha updater manifest lives at `https://github.com/different-ai/openwork/releases/download/alpha-macos-latest/latest.json`; Electron alpha assets include `latest-mac.yml` at `https://github.com/different-ai/openwork/releases/download/alpha-macos-latest/latest-mac.yml` on the same release. + +Guidelines: + +- The Tauri alpha channel is an opt-in preference (`LocalPreferences.releaseChannel`). The normal Updates toggle is rendered only when `isTauriRuntime()` and `isMacPlatform()` both resolve true; other platforms silently fall back to stable even if the stored preference says `"alpha"`. +- The Electron alpha channel is Debug-only during the migration window. Migrated Electron users can switch feeds from Settings → Debug → Electron alpha channel; the normal Updates page stays on the selected Electron feed and defaults to stable. +- Alpha builds advertise the next patch version plus an `-alpha.+` prerelease suffix. That keeps semver ordering `stable < alpha.1 < alpha.2 < next stable` so alpha users migrate forward cleanly when the next stable ships. +- Alpha and stable share the same Tauri updater signing keypair so an installed stable can upgrade into alpha and vice versa without re-installing manually. +- Apple signing and notarization are required on both channels; `alpha-macos-aarch64.yml` fails closed unless `MACOS_NOTARIZE=true`, and the `Release App` Electron job reuses the same Tauri Apple signing/notary secrets. +- The alpha workflow is the source of truth for the alpha channel's CI contract. Treat `.github/workflows/alpha-macos-aarch64.yml`, `apps/app/src/app/lib/release-channels.ts`, and this document as one coupled unit. 
+ +Code references: + +- Workflow: `.github/workflows/alpha-macos-aarch64.yml` +- Endpoint resolution: `apps/app/src/app/lib/release-channels.ts` +- Electron alpha resolver: `apps/app/src/app/lib/electron-alpha.ts` +- Preference plumbing: `apps/app/src/react-app/kernel/local-provider.tsx`, `apps/app/src/react-app/domains/settings/pages/updates-view.tsx`, `apps/app/src/react-app/domains/settings/pages/debug-view.tsx` +- Stable workflow (reference): `.github/workflows/release-macos-aarch64.yml` + +## Reload-required flow + +OpenWork uses a single reload-required flow for changes that only take effect when OpenCode restarts. + +Key pieces: + +- `createSystemState()` owns the raw queued-reload state. +- `reloadPending()` means a reload is currently queued for the active workspace. +- `markReloadRequired(reason, trigger)` queues the reload and records the source that caused it. +- `app.tsx` exposes `reloadRequired(...sources)` as a small helper for UI filtering. It is used to decide whether the shared reload popup should show for a given trigger type. + +Use this flow when a change mutates startup-loaded OpenCode inputs, for example: + +- `opencode.json` +- `.opencode/skills/**` +- `.opencode/agents/**` +- `.opencode/commands/**` +- MCP definitions or plugin lists that OpenCode only loads at startup + +Do not invent a separate reload banner per feature. New UI that needs restart semantics should: + +1. perform the config or filesystem mutation +2. call `markReloadRequired(...)` +3. rely on the shared reload popup to explain and execute the restart path + +Current examples that should use this shared flow include MCP changes, auto context compaction, default model changes, authorized folder updates, plugin changes, and other `opencode.json` writes. + +When the desktop shell asks the OpenWork server to manage OpenCode, the managed +OpenCode process starts from a shell-owned local workdir under app data instead +of the user's selected workspace. 
Workspace-specific file access still flows +through the OpenWork server and `x-opencode-directory`, but startup no longer +depends on opening a project `opencode.json` from slow cloud-synced folders such +as iCloud Drive. + +## opencode primitives +how to pick the right extension abstraction for +@opencode + +opencode has a lot of extensibility options: +mcp / plugins / skills / bash / agents / commands + +- mcp +use when you need authenticated third-party flows (oauth) and want to expose that safely to end users +good fit when "auth + capability surface" is the product boundary +downside: you're limited to whatever surface area the server exposes + +- bash / raw cli +use only for the most advanced users or internal power workflows +highest risk, easiest to get out of hand (context creep + permission creep + footguns) +great for power users and prototyping, terrifying as a default for non-tech users + +- plugins +use when you need real tools in code and want to scope permissions around them +good middle ground: safer than raw cli, more flexible than mcp, reusable and testable +basically "guardrails + capability packaging" + +- skills +use when you want reliable plain-english patterns that shape behavior +best for repeatability and making workflows legible +pro tip: pair skills with plugins or cli (i literally embed skills inside plugins right now and expose commands like get_skills / retrieve) + +- agents +use when you need to create tasks that are executed by different models than the main one and might have some extra context to find skills or interact with mcps. + +- commands +`/` commands that trigger tools + +These are all opencode primitives you can read the docs to find out exactly how to set them up. 
+ +## Core Concepts of OpenWork + +- uses all these primitives +- uses native OpenCode commands for reusable flows (markdown files in `.opencode/commands`) +- adds a new abstraction "workspace" is a project folder and a simple .json file that includes a list of opencode primitives that map perfectly to an opencode workdir (not fully implemented) + - openwork can open a workpace.json and decide where to populate a folder with thse settings (not implemented today + +## Repository/component map + +- `/apps/app/`: OpenWork app UI (desktop/mobile/web client experience layer). +- `/apps/desktop/`: Tauri desktop shell that hosts the app UI and manages native process lifecycles. +- `/apps/server/`: OpenWork server (API/control layer consumed by the app). +- `/apps/orchestrator/`: OpenWork orchestrator CLI/daemon. In `start`/`serve` host mode it manages OpenWork server + OpenCode; in daemon mode it manages worker/sandbox lifecycle. +- `/apps/share/`: share-link publisher service for OpenWork bundle imports. +- `/ee/apps/landing/`: OpenWork landing page surfaces. +- `/ee/apps/den-web/`: Den web UI for sign-in, worker creation, and future user-management flows. +- `/ee/apps/den-api/`: Den control plane API (formerly `/ee/apps/den-controller/`) that provisions/spins up worker runtimes. +- `/ee/apps/den-worker-proxy/`: proxy layer that keeps Daytona API keys server-side, refreshes signed worker preview URLs, and forwards worker traffic so users do not manage provider keys directly. +- `/ee/apps/den-worker-runtime/`: worker runtime packaging (including Docker/runtime artifacts) deployed to Daytona sandboxes. + +## Core Architecture + +OpenWork is a client experience that consumes OpenWork server surfaces. + +### Provider-neutral app control surface + +OpenWork app control mode is owned by the UI runtime. 
The app exposes a +provider-neutral action registry through `window.__openworkControl` so external +controllers can inspect the current route, discover visible/safe actions, and +request an action by ID without depending on DOM scraping or a specific model +provider. + +Guidelines: + +- The app owns visible, screen-local state: which actions are available, which + element should be spotlighted, and how actions are choreographed so users can + see control happen. +- Controllers such as MCP bridges, test harnesses, or optional external drivers should + call the app control surface instead of reaching into app internals. +- Provider/API secrets and privileged filesystem or server mutations remain + server-owned; the app control surface should route those through OpenWork + server APIs rather than adding provider-specific behavior to the UI. +- Raw screenshot or coordinate-based control is a fallback for uninstrumented + surfaces, not the default architecture. + +### MCP UI Control profile + +OpenWork should standardize external app control through MCP where possible. The +app-local `window.__openworkControl` registry remains the source of current UI +affordances, but public integrations should expose those affordances as MCP +tools that follow `docs/mcp-ui-control-profile.md`: + +- `ui.snapshot` for current semantic app state +- `ui.list_actions` for currently available action metadata and input schemas +- `ui.execute_action` for running one semantic action by ID + +Standalone control clients such as HandsFree should be MCP clients first: they +can connect to any configured MCP server and call generic MCP tools. OpenWork's +local UI bridge is an implementation detail behind the OpenWork MCP surface. 
+ +OpenWork supports two product runtime modes for users: + +- desktop +- web/cloud (also usable from mobile clients) + +OpenWork therefore has two runtime connection modes: + +### Mode A - Desktop + +- OpenWork runs on a desktop/laptop and can host OpenWork server surfaces locally. +- The OpenCode server runs on loopback (default `127.0.0.1:4096`). +- The OpenWork server also defaults to loopback-only access. Remote sharing is an explicit opt-in that rebinds the OpenWork server to `0.0.0.0` while keeping OpenCode on loopback. +- OpenWork UI connects via the official SDK and listens to events. +- OpenWork server is the local API/control layer for this mode and owns the managed OpenCode child lifecycle. + +### Mode B - Web/Cloud (can be mobile) + +- User signs in to hosted OpenWork web/app surfaces (including mobile browser/client access). +- User launches a cloud worker from hosted control plane. +- OpenWork returns remote connect credentials (`/w/ws_*` URL + access token). +- User connects from OpenWork app using `Add a worker` -> `Connect remote`. + +This model keeps the user experience consistent across self-hosted and hosted paths while preserving OpenCode parity. + +### Mode A composition (Tauri shell + local services) + +- `/apps/app/` runs as the product UI; on desktop it is hosted inside `/apps/desktop/` (Tauri webview). +- `/apps/desktop/` exposes native commands (`engine_*`, `orchestrator_*`, `openwork_server_*`) to start/stop local services and report status to the UI. +- `/apps/desktop/` is also the source of truth for desktop bootstrap config that must survive updates, including Den server targeting and forced-sign-in startup behavior. The shell reads a predictable external `desktop-bootstrap.json` from the host config directory (or `OPENWORK_DESKTOP_BOOTSTRAP_PATH` when explicitly overridden). Default builds consume that file when present; custom builds seed or overwrite it when their bundled bootstrap differs from the standard default. 
+- Desktop host runtime is server-managed: the shell starts OpenWork server with managed OpenCode enabled, and the UI consumes OpenWork server APIs. +- OpenWork server (`/apps/server/`) is the API surface consumed by the UI; it proxies OpenCode routes for the active workspace. +- Desktop-launched OpenCode credentials are always random, per-launch values generated by OpenWork. OpenCode stays on loopback and is intended to be reached through OpenWork server rather than exposed directly. + +```text +/apps/app UI + | + v +/apps/desktop (Tauri shell) + | + +--> /apps/server (OpenWork API + proxy surface) + | + +--> OpenCode +``` + +### Mode B composition (Web/Cloud services) + +- `/ee/apps/den-web/` is the hosted web control surface (sign-in, worker create, upcoming user management). +- `/ee/apps/den-api/` (formerly `/ee/apps/den-controller/`) is the cloud control plane API (auth/session + worker CRUD + provisioning orchestration). +- Desktop org runtime config is fetched from Den after sign-in and is treated as server-owned runtime policy. It is stored per organization in Den (`organization.desktop_app_restrictions`) as sparse negative restriction flags (for example `blockZenModel`) and managed from the cloud org settings UI, while install/bootstrap config remains shell-owned in the external bootstrap file and only contains base URL, optional API base URL, and the `forceSignin` startup flag. +- Daytona-backed workers mount a single shared provider volume and isolate each worker's persistent data by subpaths (`workers//workspace` and `workers//data`) rather than creating dedicated provider volumes per worker. +- `/ee/apps/den-worker-runtime/` defines the runtime packaging and boot path used inside cloud workers (including Docker/snapshot artifacts and `openwork serve` startup assumptions). +- `/ee/apps/den-worker-proxy/` fronts Daytona worker preview URLs, refreshes signed links with provider credentials, and proxies traffic to the worker runtime. 
+- The OpenWork app (desktop or mobile client) connects to worker OpenWork server surfaces via URL + token (`/w/ws_*` when available). + +```text +/ee/apps/den-web + | + v +/ee/apps/den-api (formerly /ee/apps/den-controller) + | + +--> Daytona/Render provisioning + | | + | v + | /ee/apps/den-worker-runtime -> openwork serve + OpenCode + | + +--> /ee/apps/den-worker-proxy (signed preview + proxy) + +OpenWork app/mobile client + -> Connect remote (URL + token) + -> worker OpenWork server surface +``` + +## Messaging Bridge + +OpenWork no longer starts or proxies an app-owned local messaging bridge in the desktop host runtime. Messaging surfaces must be provided by an external server/worker surface rather than Tauri, Electron, or OpenWork server launching a local `opencode-router` child. + +Terminology clarification: + +- `selected workspace` is a UI concept: the workspace the user is currently viewing and where compose/config actions should target. +- `runtime active workspace` is a backend concept: the workspace the local server/orchestrator currently reports as active. +- `watched workspace` is the desktop-host/runtime concept for which workspace root local file watching is currently attached to. +- These states must be treated separately. UI selection can change without implying that the backend has switched roots yet. +- In practice, `selected workspace` and `runtime active workspace` often converge once the user sends work, but they are allowed to diverge briefly while the UI is browsing another workspace. + +Desktop local OpenWork server ports: + +- Desktop-hosted local OpenWork server instances do not assume a fixed `8787` port. +- Each workspace gets a persistent preferred localhost port in the `48000-51000` range. +- On restart, desktop tries to reuse that workspace's saved port first. +- If that port is unavailable, desktop picks another free port in the same range and avoids ports already reserved by other known workspaces. 
+ +```text +Shared-root case + +router root: /Users/me/projects + + /Users/me/projects/a OK + /Users/me/projects/b OK + /Users/me/projects/c OK + +Unrelated-root case + +router root: /Users/me/projects/a + + /Users/me/projects/a OK + /Users/me/other/b rejected + /tmp/c rejected +``` + +This is intentional for now: predictable scoping beats clever cross-root auto-routing. + +## Cloud Worker Connect Flow (Canonical) + +1. Authenticate in OpenWork Cloud control surface. +2. Launch worker (with checkout/paywall when needed). +3. Wait for provisioning and health. +4. Generate/retrieve connect credentials. +5. Connect in OpenWork app via deep link or manual URL + token. + +Technical note: + +- Default connect URL should be workspace-scoped (`/w/ws_*`) when available. +- Technical diagnostics (host URL, worker ID, raw logs) should be progressive disclosure, not default UI. + +## Web Parity + Filesystem Actions + +The browser runtime cannot read or write arbitrary local files. Any feature that: + +- reads skills/commands/plugins from `.opencode/` +- edits `SKILL.md` / command templates / `opencode.json` +- opens folders / reveals paths + +must be routed through a host-side service. + +In OpenWork, the long-term direction is: + +- Use the OpenWork server (`/apps/server/`) as the single API surface for filesystem-backed operations. +- Treat Tauri-only file operations as an implementation detail / convenience fallback, not a separate feature set. + +This ensures the same UI flows work on desktop, mobile, and web clients, with approvals and auditing handled centrally. 
+ +## OpenCode Integration (Exact SDK + APIs) + +OpenWork uses the official JavaScript/TypeScript SDK: + +- Package: `@opencode-ai/sdk/v2` (UI should import `@opencode-ai/sdk/v2/client` to avoid Node-only server code) +- Purpose: type-safe client generated from OpenAPI spec + +### Engine Lifecycle + +#### Start server + client (Host mode) + +Use `createOpencode()` to launch the OpenCode server and create a client. + +```ts +import { createOpencode } from "@opencode-ai/sdk/v2"; + +const opencode = await createOpencode({ + hostname: "127.0.0.1", + port: 4096, + timeout: 5000, + config: { + model: "anthropic/claude-3-5-sonnet-20241022", + }, +}); + +const { client } = opencode; +// opencode.server.url is available +``` + +#### Connect to an existing server (Client mode) + +```ts +import { createOpencodeClient } from "@opencode-ai/sdk/v2/client"; + +const client = createOpencodeClient({ + baseUrl: "http://localhost:4096", + directory: "/path/to/project", +}); +``` + +### Health + Version + +- `client.global.health()` + - Used for startup checks, compatibility warnings, and diagnostics. + +### Event Streaming (Real-time UI) + +OpenWork must be real-time. It subscribes to SSE events: + +- `client.event.subscribe()` + +The UI uses these events to drive: + +- streaming assistant responses +- step-level tool execution timeline +- permission prompts +- session lifecycle changes + +### Sessions (Primary Primitive) + +OpenWork maps a "Task Run" to an OpenCode **Session**. 
+ +Core methods: + +- `client.session.create()` +- `client.session.list()` +- `client.session.get()` +- `client.session.messages()` +- `client.session.prompt()` +- `client.session.abort()` +- `client.session.summarize()` + +### Files + Search + +OpenWork's file browser and "what changed" UI are powered by: + +- `client.find.text()` +- `client.find.files()` +- `client.find.symbols()` +- `client.file.read()` +- `client.file.status()` + +### Permissions + +OpenWork must surface permission requests clearly and respond explicitly. + +- Permission response API: + - `client.permission.reply({ requestID, reply })` (where `reply` is `once` | `always` | `reject`) + +OpenWork UI should: + +1. Show what is being requested (scope + reason). +2. Provide choices (allow once / allow for session / deny). +3. Post the response to the server. +4. Record the decision in the run's audit log. + +### Config + Providers + +OpenWork's settings pages use: + +- `client.config.get()` +- `client.config.providers()` +- `client.auth.set()` (optional flow to store keys) + +### Extensibility - Skills + Plugins + +OpenWork exposes two extension surfaces: + +1. **Skills** + - Installed into `.opencode/skills/*`. + - Skills can be imported from local directories or installed from curated lists. + +2. **Plugins (OpenCode)** + - Plugins are configured via `opencode.json` in the workspace. + - The format is the same as OpenCode CLI uses today. + - OpenWork should show plugin status and instructions; a native plugin manager is planned. + +### Engine reload (config refresh) + +- OpenWork server exposes `POST /workspace/:id/engine/reload`. +- It calls OpenCode `POST /instance/dispose` with the workspace directory to force a config re-read. +- Use after skills/plugins/MCP/config edits; reloads can interrupt active sessions. +- Reload requests follow OpenWork server approval rules. + +### Skill Registry (Current + Future) + +- Today, OpenWork only supports **curated lists + manual sources**. 
+- Future goals: + - in-app registry search + - curated list sync (e.g. Awesome Claude Skills) + - frictionless publishing without signup + +## Projects + Path + +- `client.project.list()` / `client.project.current()` +- `client.path.get()` + +OpenWork conceptually treats "workspace" as the current project/path. + +## Optional TUI Control (Advanced) + +The SDK exposes `client.tui.*` methods. OpenWork can optionally provide a "Developer Mode" screen to: + +- append/submit prompt +- open help/sessions/themes/models +- show toast + +This is optional and not required for non-technical MVP. + +## Folder Authorization Model + +OpenWork enforces folder access through **two layers**: + +1. **OpenWork UI authorization** + - user explicitly selects allowed folders via native picker + - OpenWork remembers allowed roots per profile + +2. **OpenCode server permissions** + - OpenCode requests permissions as needed + - OpenWork intercepts requests via events and displays them + +Rules: + +- Default deny for anything outside allowed roots. +- "Allow once" never expands persistent scope. +- "Allow for session" applies only to the session ID. +- "Always allow" (if offered) must be explicit and reversible. + +## Open Questions + +- Best packaging strategy for Host mode engine (bundled vs user-installed Node/runtime). +- Best remote transport for mobile client (LAN only vs optional tunnel). diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..fb9c64f38c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,55 @@ +# Code of Conduct + +## Our commitment + +We are committed to making participation in the OpenWork community a harassment-free +experience for everyone, regardless of age, body size, disability, ethnicity, sex +characteristics, gender identity and expression, level of experience, education, +socio-economic status, nationality, personal appearance, race, religion, or sexual +identity and orientation. 
+ +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, +inclusive, and healthy community. + +## Our standards + +Examples of behavior that contributes to a positive environment include: + +- Demonstrating empathy and kindness toward other people. +- Being respectful of differing opinions, viewpoints, and experiences. +- Giving and gracefully accepting constructive feedback. +- Taking responsibility and apologizing to those affected by our mistakes. +- Focusing on what is best for the community. + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery and sexual attention or advances. +- Trolling, insulting or derogatory comments, and personal or political attacks. +- Public or private harassment. +- Publishing others' private information without explicit permission. +- Other conduct that could reasonably be considered inappropriate in a professional + setting. + +## Enforcement responsibilities + +Project maintainers are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in response +to behavior they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all community spaces, including GitHub issues, +pull requests, discussions, and direct interactions in official OpenWork channels. + +## Reporting + +If you experience or witness unacceptable behavior, report it to +`benjamin.shafii@gmail.com` with as much context as possible. + +All reports will be reviewed and investigated promptly and fairly. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.1. 
+ +[homepage]: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ diff --git a/DESIGN-LANGUAGE.md b/DESIGN-LANGUAGE.md new file mode 100644 index 0000000000..2e06374211 --- /dev/null +++ b/DESIGN-LANGUAGE.md @@ -0,0 +1,871 @@ +# OpenWork Design Language + +This is the definitive visual system for OpenWork product and landing work. + +OpenWork should feel like a premium work tool: calm, useful, technical, and trustworthy. The design should read as software first, not a flashy marketing site. The goal is clarity with taste, not visual noise. + +--- + +## 1. Core Design Position + +OpenWork design is: + +- quiet +- premium +- operational +- flat-first +- structured by typography, spacing, and borders +- atmospheric only in controlled places + +OpenWork design is **not**: + +- glossy +- glassy +- beige +- aggressively gradient-heavy +- border-heavy +- shadow-led +- decorative for its own sake + +The basic rule: + +> Use structure before effects. + +If something needs emphasis, prefer this order: + +1. layout +2. spacing +3. typography +4. opacity +5. background tint +6. border +7. shadow + +Shadow should almost never be the first tool. + +--- + +## 2. The OpenWork Mood + +The product should feel like: + +- a serious desktop tool +- a clean command center +- a modern open-source alternative to Claude Cowork +- something you would trust with real workflows, team sharing, and remote workers + +Tone: + +- polished, but restrained +- modern, but not trendy +- friendly, but not cute +- futuristic through discipline, not chrome + +--- + +## 3. Color + Surface Rules + +### Base page color + +- Default page/background base: very light cool neutral (`#f6f9fc` or equivalent) +- Prefer white and near-white surfaces over tinted beige panels +- Avoid warm paper/beige backgrounds unless there is a very strong reason + +### Surface hierarchy + +Use only a few layers: + +1. **Page background** +2. **Primary white surface** +3. **Soft secondary surface** +4. 
**Interactive selected state** + +Do not create lots of micro-layers. + +### Preferred surface treatments + +#### Flat app surface + +For most application UI: + +- white or near-white background +- 1px subtle border +- no visible shadow or only the smallest shadow possible + +#### Soft shell + +Use for landing sections that need grouping but should still feel calm. + +- `landing-shell-soft` style direction +- near-white background +- subtle edge definition +- **no box shadow by default** + +This is no longer landing-only in spirit. For app surfaces like modals, package builders, +and share flows, the same shell language is often the right starting point when the surface +represents a workflow object instead of generic settings chrome. + +#### Elevated showcase shell + +Use only when a hero/demo needs one extra level of emphasis. + +- may use `landing-shell` +- still soft +- never dark or “floating card everywhere” +- should be rare, not the default wrapper for all sections + +### Background imagery + +Allowed only when all of the following are true: + +- it sits behind content, not under core text blocks directly +- it is subtle +- it fades away or is spatially constrained +- it does not compete with reading + +Pattern/background image rules: + +- top-of-page background patterns should be low-opacity and fade out down the page +- section-specific image backgrounds are allowed for showcase frames +- content cards that sit on top of image backgrounds should still be white and legible +- use images as atmosphere, not content + +--- + +## 4. Borders + +Borders are one of the main structure tools in OpenWork. 
+ +### Border philosophy + +- prefer soft gray borders +- prefer low contrast +- prefer consistency over emphasis + +### What not to do + +- do **not** use harsh black borders for selection +- do **not** outline selected cards with strong dark strokes +- do **not** stack border + heavy shadow + tint all at once + +### Good border usage + +- `border-gray-200` +- `border-gray-300` for stronger but still soft selection +- low-alpha white borders for translucent landing shells +- soft shell borders like `#eceef1` for app sidebars and large rounded utility panels + +Do not use a dark or high-contrast outline as the main styling for a small icon tile, +badge shell, or compact decorative container. If the element is just carrying an icon, +prefer a soft filled tile over an outlined chip. + +Selection should usually feel like: + +- soft neutral fill +- darker text +- optional tiny border or tiny shadow only when needed + +not: + +- dark outline +- glow +- hard stroke + +--- + +## 5. Shadows + +Shadows must be restrained. + +### General rule + +- App UI: almost flat +- Landing UI: soft and selective +- Selection states: tiny shadow only + +### Approved shadow levels + +#### None + +Default for most grouped surfaces. + +#### Tiny control shadow + +Use for active pills and secondary buttons: + +```css +0 0 0 1px rgba(0,0,0,0.06), +0 1px 2px 0 rgba(0,0,0,0.04) +``` + +#### Light card shadow + +Use sparingly for a main demo shell or one hero card. + +#### Strong CTA shadow + +Reserved for the primary CTA only. + +### Never do + +- large ambient shadows across many cards on one page +- floaty SaaS-marketing shadows everywhere +- using shadow as the main selected-state signal +- glassmorphism blur shadows in the app + +--- + +## 6. Geometry + Radius + +OpenWork should have a small set of radii and use them consistently. 
+ +### Radius system + +- **Pills / buttons / chips:** `rounded-full` +- **Small controls / rows / compact cards:** `rounded-xl` +- **Medium panels / embedded demos:** `rounded-2xl` +- **Large showcase wrappers:** `rounded-3xl` or `rounded-[2.5rem]` +- **Sidebar/app shell wrappers:** `rounded-[2rem]` to `rounded-[2.5rem]` + +### Rules + +- Don’t mix too many different radii in one section +- If the outer shell is very rounded, inner panels should step down cleanly +- Pills should look intentional, not bubbly + +--- + +## 7. Typography + +Typography does most of the hierarchy work. + +### General tone + +- clean sans-serif +- medium weight for important labels +- gray text for explanatory copy +- no overly stylized headings + +### Hierarchy + +#### Eyebrows + +- uppercase +- tracked +- small (`text-[11px]`) +- muted gray + +#### Headlines + +- medium weight +- tight tracking +- dark ink (`#011627` or equivalent) +- large enough to lead, not shout + +#### Body + +- `text-sm` or `text-base` +- relaxed line height +- `text-gray-500` or `text-gray-600` + +#### Active explanatory text + +If paired with an active state (like a selected workflow descriptor), the copy may move from muted gray to dark ink. + +### Avoid + +- giant type jumps +- ultra-light weights +- loud uppercase body copy +- dense paragraphs without breathing room + +--- + +## 7.5 Copy Direction + +OpenWork copy should feel as disciplined as the UI. 
+ +### General tone + +- concise +- product-led +- operational +- calm +- confident without overselling + +### Good copy behavior + +- lead with the main user value, not the implementation detail +- prefer one clear idea per sentence +- keep interface copy shorter than marketing copy +- make support text explain utility, not restate the headline in different words + +### Avoid + +- repetitive copy that says the same thing three ways +- enterprise filler words like "provisioned setups" when a simpler phrase exists +- admin-heavy or billing-heavy framing when the main value is team workflow +- overdescribing secondary features + +### Preferred OpenWork Cloud framing + +For OpenWork Cloud, the primary story is: + +1. share setup across the team/org +2. keep everything in sync +3. background agents are secondary / alpha +4. custom LLM providers are tertiary / coming soon + +Do not make the product read like: + +- a billing page first +- a hosting toggle first +- an equal split between desktop and Cloud + +It should read like: + +- team setup sharing first +- operational consistency second +- advanced/cloud extensions after that + +### Preferred terminology + +Use: + +- **OpenWork Cloud** +- **Shared setups** +- **Shared templates** +- **Custom LLM providers** +- **Background agents** + +Prefer: + +- "Manage your team’s setup, invite teammates, and keep everything in sync." +- "Create and update shared templates your team can use right away." +- "Standardize provider access for your team." + +Avoid: + +- "Den" in user-facing copy +- "Provisioned setups" +- "Configured setups" +- "Choose how to run..." 
when the real goal is to explain team value + +### Hierarchy rules for product pages + +For sign-in, checkout, and dashboard copy: + +- headline should state the core team value +- subcopy should explain the workflow benefit in one sentence +- supporting bullets/cards should not compete equally with the main value +- desktop should often appear as a fallback or secondary path, not a co-equal hero choice + +### Docs CTA language + +When linking to supporting documentation, prefer short utility labels: + +- **Learn how** +- **How sharing works** +- **Read the guide** + +These should feel like helpful follow-through, not a second headline. + +--- + +## 8. Buttons + +There are only a few button families in OpenWork. + +### 8.1 Primary button + +Use for the main action only. + +Characteristics: + +- dark fill (`#011627`) +- white text +- fully rounded pill +- slightly stronger shadow than the rest of the system +- feels decisive but still clean + +Canonical pattern: `doc-button` + +Use for: + +- Download +- Run task +- other main conversion/action moments + +### 8.2 Secondary button + +Use for support actions. + +Characteristics: + +- white fill +- no hard border +- tiny ring + small shadow +- black/dark text +- fully rounded pill + +Canonical pattern: `secondary-button` + +This is also the reference style for: + +- active segmented controls +- selected pills inside a track + +### 8.3 Tertiary / text actions + +Use for less important actions. + +Characteristics: + +- no heavy box treatment +- rely on text color and hover only + +### Button rules + +- Do not invent many new button styles +- Reuse the primary and secondary button logic whenever possible +- If a selector pill is active, it should usually resemble the secondary button family + +--- + +## 9. Selectors, Tabs, and Pills + +This is now one of the clearest OpenWork patterns. 
+ +### Track pattern + +Use a soft segmented track: + +- light border +- subtle gray background +- full pill radius +- tiny inset padding + +Example structure: + +- track: `border border-gray-200 bg-gray-50/50 rounded-full p-1` +- active item: white pill + tiny shadow +- inactive item: muted text only + +### Active state + +Active tab/pill should look like: + +- white pill +- soft ring/shadow +- dark text + +### Inactive state + +Inactive tab/pill should look like: + +- no card chrome +- muted gray text +- stronger text on hover + +### Do not + +- use harsh dark borders for selection +- create heavy segmented controls with thick strokes +- use loud fills for tabs + +### Flat selected row pattern + +For app navigation, especially dashboard sidebars: + +- selected state should usually be a soft gray fill (`bg-gray-100` / `bg-slate-100` family) +- selected items should not default to white floating pills inside a white or near-white shell +- rely on fill + text weight before adding chrome +- hover state should usually be one step lighter than selected, not a different visual language + +--- + +## 10. Lists and Row Systems + +OpenWork has two primary list patterns. + +### 10.1 Operational row list + +Use for sessions, workspaces, activity rows, and compact app lists. + +Pattern: + +- flat container +- rounded-xl row +- light hover tint +- selected row uses a subtle fill and stronger text +- metadata remains quiet + +Good signals: + +- `font-medium` +- subtle background tint +- tiny status accent if needed +- rounded-2xl row inside a softer outer shell when the list is acting as a primary sidebar + +Bad signals: + +- white card floating above white page +- hard selected outline +- large shadows on list rows + +### 10.1a Sidebar shell pattern + +Use for app/dashboard sidebars when the sidebar itself should feel like a calm standalone object. 
+ +Pattern: + +- outer shell uses a near-white neutral background, not pure white +- shell gets a large radius (`rounded-[2rem]` range) +- shell uses a faint border, often enough without any visible shadow +- internal rows stay flatter than the outer shell +- selected row uses a soft gray fill, not a stronger border treatment +- footer actions may appear as floating white pills/cards inside the shell if they need separation + +This is the right pattern for: + +- workspace sidebars +- Cloud dashboard sidebars +- utility navigation that should feel product-like rather than admin-like + +### 10.2 Text-led preview list + +Use when a list controls a larger preview panel to the right. + +Pattern: + +- no boxed cards for each item +- text blocks stacked vertically +- inactive items use lower opacity +- active item uses full opacity and darker copy + +This is the right pattern for: + +- feature explanation lists next to a demo panel +- “build / import / ready” style narratives + +--- + +## 11. Cards and Section Layouts + +### Explanatory cards + +Use only when the card itself is the unit of information. + +Should be: + +- simple +- lightly bordered +- white +- softly rounded + +### When not to use cards + +If the user is just choosing between three conceptual options, don’t force every option into a boxed card. Use: + +- pill selector +- text-only list +- opacity-driven stacked copy + +### Product object cards + +Use when the UI is presenting a reusable worker, template, integration, or packaged setup. + +Pattern: + +- soft shell or near-white card +- generous padding +- title first +- one short supporting sentence +- compact status pill in the top-right if needed +- actions inline underneath or within the card + +These should feel like curated product objects, not admin rows. 
+ +### Icon tiles inside cards + +When a card uses an icon block: + +- use a soft filled tile (`bg-slate-50` / similar) +- prefer no visible border by default +- let size, radius, and fill define the tile +- if a muted version is needed, use a quieter fill rather than an outline + +Do not: + +- put a dark stroke around the icon tile +- make the icon tile look like a separate outlined button unless it actually is one +- introduce standalone black/ink borders for decorative icon wrappers + +### Section composition + +Most sections should follow one of these layouts: + +1. **Headline + supporting copy + CTA** +2. **Selector on left + live descriptor on right** +3. **Text list on left + preview/demo on right** +4. **Three-column summary cards** + +Do not mix too many interaction models in one section. + +--- + +## 12. Demo and Mockup Styling + +Embedded product demos should feel like software, not like illustrations. + +### Demo shell rules + +- white inner content area +- subtle chrome +- soft border +- restrained shadow +- clear spacing + +### If the outer frame is atmospheric + +Then the inner mockup must become simpler. + +Meaning: + +- image/pattern on outer background is okay +- inner card should stay clean and white +- do not combine colorful outer frame with complex inner effects + +### Content in demos + +- use real-looking interaction states +- keep labels readable +- emphasize utility over visual flourish + +### Packaged workflow surfaces + +When showing a workflow like share/package/export: + +- prefer a soft shell over default modal chrome +- make the core object the hero (template, worker, integration, package) +- reduce the number of nested bordered panels +- use one or two strong cards, then flatter supporting sections +- present actions as intentional product actions, not generic form controls + +--- + +## 13. 
Selection States + +Selection should usually be shown through one or more of: + +- darker text +- stronger opacity +- soft neutral fill +- soft gray border +- tiny shadow + +Selection should **not** usually be shown through: + +- black outline +- bright accent fill +- glow +- thick stroke + +OpenWork selection should feel confident, not loud. + +When a selected item sits inside a soft app shell, prefer: + +- tinted gray fill first +- then weight and text color +- then at most a tiny white badge or tiny control shadow for supporting UI + +Avoid making the selected state look like a separate floating card unless the interface is explicitly using segmented pills. + +--- + +## 13.5 Modal Surfaces + +Not every modal should look like a system dialog. + +For workflow modals (share, package, connect, publish, save to team): + +- use a large soft shell with a near-white background +- keep the header airy and typographic +- avoid harsh header separators unless they add real structure +- prefer one scrollable content region inside the shell +- use soft cards for major choices +- reduce mini-panels and stacked utility boxes + +Good modal direction: + +- feels like a product surface +- can contain object cards and actions +- uses soft hierarchy and breathing room + +Bad modal direction: + +- dense settings sheet +- too many small bordered sub-panels +- generic dialog chrome with no product feel + +--- + +## 14. Motion + +Motion should be tight and purposeful. + +### Allowed motion + +- pill transitions with spring +- short opacity transitions +- tiny translateY on primary CTA hover +- soft content crossfades + +### Avoid + +- floaty delayed animations everywhere +- scale-heavy hover effects +- decorative motion on non-interactive surfaces + +### Timing + +- interactions should feel immediate +- most transitions should live around `150ms–300ms` +- spring motion should be controlled, not bouncy + +--- + +## 15. 
OpenWork App vs Landing + +The app and the landing share one system, but not the same degree of atmosphere. + +### App + +- flatter +- more structural +- almost no decorative shadow +- almost no background texture +- strong emphasis on state clarity and density + +### Landing + +- may use soft shells +- may use one atmospheric background image/pattern in a controlled region +- may use more spacing and larger radii +- still must obey the same button, border, and selection rules + +Landing should feel like the same product family, not a separate visual brand. + +--- + +## 16. Anti-Patterns + +Do not introduce these: + +- beige canvases as default backgrounds +- harsh black selected borders +- random glassmorphism +- multiple heavy shadow systems on one screen +- over-rounded cards everywhere +- boxed selectors when text or pills would be clearer +- giant gradients behind readable text +- decorative badges/counters with no functional meaning +- hiding anchor labels just to show hover actions +- outlined icon chips that read darker than the card they sit inside + +If something looks “designed” before it looks “useful,” it is probably wrong. + +--- + +## 17. 
Canonical Component Patterns + +### Primary CTA + +- dark pill +- white text +- slight elevation + +### Secondary CTA / active segmented pill + +- white pill +- tiny ring + tiny shadow +- dark text + +### Selector track + +- light gray border +- soft neutral background +- internal padding +- active item is white + +### Text-led feature list + +- no cards +- stacked copy +- inactive items at reduced opacity +- active item at full opacity + +### Operational list row + +- rounded-xl +- subtle hover tint +- selected row uses fill/weight, not loud chrome + +### App sidebar shell + +- large rounded outer shell +- faint neutral background +- subtle border +- flat internal rows +- selected row uses soft gray fill +- floating footer action can be white if it needs separation from the shell + +### Share/package modal + +- soft shell modal +- object cards for reusable templates or integrations +- compact status pills +- strong dark primary CTA +- white secondary CTA with tiny ring/shadow +- avoid form-heavy utility styling unless the step is truly form-driven + +### Landing shell + +- reserved for hero/showcase moments +- use sparingly + +### Landing soft shell + +- flat, near-white, subtle border +- no shadow by default + +--- + +## 18. Design Decision Tests + +Before shipping a UI change, ask: + +1. Is this relying on layout and typography first, or on effects first? +2. Is the selected state soft and obvious, rather than harsh? +3. Are we reusing the existing primary/secondary button language? +4. Does this section need cards, or would pills / text / opacity be cleaner? +5. Is the shadow doing real work, or is it just decoration? +6. Would this still feel like OpenWork if all colors were muted? +7. Does this feel like one coherent product across app and landing? + +If the answer to those is not clearly yes, simplify. + +--- + +## 19. 
Canonical References in This Repo + +Use these as implementation references: + +- Landing button + shell primitives: `ee/apps/landing/app/globals.css` +- Landing hero and selector patterns: `ee/apps/landing/components/landing-home.tsx` +- Landing demo list rhythm: `ee/apps/landing/components/landing-app-demo-panel.tsx` +- Cloud dashboard sidebar shell + selected state: `ee/apps/den-web/app/(den)/o/[orgSlug]/dashboard/_components/org-dashboard-shell.tsx` +- Share/package modal direction: `apps/app/src/app/components/share-workspace-modal.tsx` +- App workspace/session list rhythm: `apps/app/src/app/components/session/workspace-session-list.tsx` + +When in doubt, prefer the calmer version. diff --git a/DESIGN-SYSTEM.md b/DESIGN-SYSTEM.md new file mode 100644 index 0000000000..efc252707c --- /dev/null +++ b/DESIGN-SYSTEM.md @@ -0,0 +1,474 @@ +# OpenWork Design System + +This document turns the visual direction in `DESIGN-LANGUAGE.md` into an implementation system that can unify: + +- `apps/app` (OpenWork app) +- `ee/apps/den-web` (OpenWork Cloud / Den web surfaces) +- `ee/apps/landing` (marketing + product storytelling) + +The goal is not to create three similar styles. The goal is one OpenWork design system with a few environment-specific expressions. + +--- + +## 1. Why this exists + +Today the product already has the beginnings of a system, but it is split across: + +- app-specific CSS variables in `apps/app/src/app/index.css` +- Tailwind theme setup in `apps/app/tailwind.config.ts` +- Radix color tokens in `apps/app/src/styles/colors.css` +- repeated utility-class decisions across app, Cloud, and landing + +That creates three problems: + +1. the app and Cloud can feel related but not identical +2. visual decisions are made at the screen level instead of the system level +3. 
tokens, primitives, and page composition rules are not clearly separated + +This file defines the missing structure. + +--- + +## 2. System model + +OpenWork should use a three-layer design system: + +### Layer 1: Foundations + +Raw design tokens: + +- color +- typography +- spacing +- radius +- shadow +- motion + +These are the only values components should depend on directly. + +### Layer 2: Semantic tokens + +Product-meaning tokens: + +- `surface.page` +- `surface.panel` +- `surface.sidebar` +- `text.primary` +- `text.secondary` +- `border.subtle` +- `action.primary.bg` +- `state.hover` +- `state.selected` + +These should map foundation tokens into product meaning. + +### Layer 3: Component primitives + +Reusable building blocks: + +- Button +- Card +- Input +- Modal shell +- Sidebar shell +- List row +- Status pill +- Section header +- Empty state + +Pages should mostly compose these primitives, not invent their own visual logic. + +--- + +## 3. Relationship to existing docs + +- `DESIGN-LANGUAGE.md` = visual philosophy and qualitative rules +- `DESIGN-SYSTEM.md` = implementation structure and migration plan + +If there is a conflict: + +1. `DESIGN-LANGUAGE.md` decides what the product should feel like +2. `DESIGN-SYSTEM.md` decides how to encode that in tokens and primitives + +--- + +## 4. Core principle: one system, three expressions + +OpenWork has three main UI contexts: + +1. **App expression** — denser, flatter, operational +2. **Cloud expression** — still operational, slightly more editorial and roomy +3. **Landing expression** — more atmospheric, but still clearly the same product family + +These should differ mostly in: + +- spacing density +- shell scale +- amount of atmosphere +- page composition + +They should **not** differ in: + +- brand color logic +- button language +- border philosophy +- type hierarchy +- selection behavior + +--- + +## 5. Canonical token architecture + +We should converge on a small token set that works everywhere. 
+ +### 5.1 Foundation color tokens + +Use Radix as the raw palette source, but not as the public API for product styling. + +Raw palette source: + +- Radix gray/slate/sage for neutrals +- Radix red/amber/green/blue for semantic states + +### 5.2 Semantic color tokens + +Canonical semantic token set: + +- `--ow-color-page` +- `--ow-color-surface` +- `--ow-color-surface-subtle` +- `--ow-color-surface-sidebar` +- `--ow-color-border` +- `--ow-color-border-strong` +- `--ow-color-text` +- `--ow-color-text-muted` +- `--ow-color-text-subtle` +- `--ow-color-accent` +- `--ow-color-accent-hover` +- `--ow-color-hover` +- `--ow-color-active` +- `--ow-color-success` +- `--ow-color-warning` +- `--ow-color-danger` + +These should become the shared API across app and Cloud. + +### 5.3 Current mapping from app tokens + +Existing app tokens already point in the right direction: + +- `--dls-app-bg` -> `--ow-color-page` +- `--dls-surface` -> `--ow-color-surface` +- `--dls-sidebar` -> `--ow-color-surface-sidebar` +- `--dls-border` -> `--ow-color-border` +- `--dls-text-primary` -> `--ow-color-text` +- `--dls-text-secondary` -> `--ow-color-text-muted` +- `--dls-accent` -> `--ow-color-accent` +- `--dls-accent-hover` -> `--ow-color-accent-hover` + +We should migrate by aliasing first, not by breaking everything at once. + +--- + +## 6. Typography system + +Typography should be systemized into roles, not ad hoc text sizes. + +### Roles + +- **display** — rare marketing or hero usage +- **headline** — page and section headers +- **title** — card and object titles +- **body** — default reading text +- **meta** — labels, helper copy, secondary information +- **micro** — pills, badges, tiny metadata + +### Shared rules + +- one main sans family across product surfaces +- medium weight does the majority of hierarchy work +- muted text is the default support color +- avoid large type jumps inside the app + +--- + +## 7. 
Spacing system + +OpenWork should use a consistent spacing scale instead of one-off values. + +Recommended base scale: + +- 4 +- 8 +- 12 +- 16 +- 20 +- 24 +- 32 +- 40 +- 48 +- 64 + +### Usage guidance + +- micro control padding: 8–12 +- row padding: 12–16 +- card padding: 20–24 +- major section padding: 32–48 +- page rhythm: 48–64 on roomy surfaces, 24–32 in dense app surfaces + +--- + +## 8. Radius system + +Canonical radius roles: + +- `--ow-radius-control` — small controls and rows +- `--ow-radius-card` — cards and panels +- `--ow-radius-shell` — sidebars, large grouped containers, modal shells +- `--ow-radius-pill` — buttons, tabs, chips + +Suggested mapping: + +- control: 12px +- card: 16px +- shell: 24px–32px +- pill: 9999px + +--- + +## 9. Shadow system + +Shadow should be a named system with very few levels. + +- `--ow-shadow-none` +- `--ow-shadow-control` +- `--ow-shadow-card` +- `--ow-shadow-shell` + +Default behavior: + +- app: mostly `none` or `control` +- Cloud: mostly `none`, `control`, occasional `card` +- landing: selective `card` or `shell` + +--- + +## 10. Component primitive families + +We should explicitly define a small primitive set shared across product surfaces. + +### 10.1 Action primitives + +- Primary button +- Secondary button +- Ghost button +- Destructive button +- Segmented pill / tab item + +### 10.2 Structure primitives + +- Page shell +- Sidebar shell +- Card +- Quiet card +- Modal shell +- Section divider + +### 10.3 Input primitives + +- Text input +- Textarea +- Select +- Checkbox/radio treatment +- Inline field group + +### 10.4 Navigation primitives + +- Sidebar row +- List row +- Topbar item +- Breadcrumb / section tab + +### 10.5 Feedback primitives + +- Status pill +- Banner +- Empty state +- Toast + +--- + +## 11. 
System-first implementation rules + +### Rule 1: prefer semantic tokens over raw utility colors + +Prefer: + +- `bg-[var(--ow-color-surface)]` +- `text-[var(--ow-color-text-muted)]` + +Over: + +- `bg-white` +- `text-gray-500` + +Raw grays are still acceptable for temporary legacy usage, but new primitives should use semantic tokens. + +### Rule 2: page code should not define new visual language + +Page files can compose primitives and choose layouts. +They should not invent new button styles, new shadow rules, or new selection patterns. + +### Rule 3: Radix stays underneath the system + +Radix is the palette source. +OpenWork tokens are the product API. + +### Rule 4: app and Cloud should share primitives even if frameworks differ + +Even when implementations differ, the primitive names and behaviors should match. + +Example: + +- `Button` in app +- `Button` in den-web + +Both should resolve to the same token logic and visual rules. + +--- + +## 12. Migration strategy + +Do not redesign everything at once. +Use this sequence. + +### Phase 1: lock the foundations + +1. create canonical semantic tokens +2. alias current app tokens to the new token names +3. document primitive families and approved variants + +### Phase 2: unify the most reused primitives + +Start with: + +1. Button +2. Card +3. Input +4. Sidebar row +5. Modal shell + +These give the largest visual consistency gain. 
+ +### Phase 3: unify shell patterns + +Standardize: + +- page background +- sidebar shell +- panel/card shell +- list row selection +- headers and section spacing + +### Phase 4: refactor high-traffic screens + +Prioritize: + +- workspace/session surfaces in `apps/app` +- Cloud dashboard shells in `ee/apps/den-web` +- share/package/connect flows in `apps/app` + +### Phase 5: remove local style drift + +As primitives stabilize: + +- reduce repeated one-off class recipes +- replace raw gray classes in repeated patterns +- collapse duplicate card/button/input styles into primitives + +--- + +## 13. Recommended initial source of truth files + +If we implement this system, the likely canonical files should be: + +- `DESIGN-LANGUAGE.md` — philosophy +- `DESIGN-SYSTEM.md` — system rules and migration plan +- `apps/app/src/app/index.css` — initial token host for app runtime +- `apps/app/tailwind.config.ts` — Tailwind token exposure +- `apps/app/src/app/components/button.tsx` — canonical action primitive start +- `apps/app/src/app/components/card.tsx` — canonical surface primitive start +- `apps/app/src/app/components/text-input.tsx` — canonical field primitive start + +Later, a shared package may make sense, but not before the token model is stable. + +--- + +## 14. Recommended file plan for the next step + +The smallest safe implementation path is: + +### Step A + +Introduce canonical `--ow-*` aliases in `apps/app/src/app/index.css` without removing `--dls-*` yet. + +### Step B + +Refactor `Button`, `Card`, and `TextInput` to consume shared semantic tokens. + +### Step C + +Use the Den dashboard shell as the reference for: + +- sidebar shell +- row selection +- neutral panel rhythm + +### Step D + +Restyle one OpenWork app screen fully using the system to prove the direction. + +Recommended pilot screens: + +- `apps/app/src/app/pages/settings.tsx` +- session/workspace sidebar surfaces +- share workspace modal + +--- + +## 15. 
What a successful system looks like + +We will know this is working when: + +1. app, Cloud, and landing feel obviously from the same product family +2. a new screen can be built mostly from existing primitives +3. visual changes happen by adjusting tokens or primitives, not by editing many pages +4. selection, buttons, cards, and inputs behave consistently everywhere +5. raw color classes become uncommon outside truly local exceptions + +--- + +## 16. Anti-goals + +This system should not: + +- introduce a trendy visual reboot disconnected from the current product +- replace the OpenWork mood described in `DESIGN-LANGUAGE.md` +- depend on a large new dependency just to manage styling +- force a shared package too early +- block incremental improvements until a perfect system exists + +The correct approach is a strong design system built through small, boring, compounding steps. + +--- + +## 17. Immediate next recommendation + +If continuing from this doc, the best next change is: + +1. add `--ow-*` semantic token aliases in `apps/app/src/app/index.css` +2. standardize `Button`, `Card`, and `TextInput` +3. then restyle one app shell to match the calmer Den dashboard direction + +That gives a real system foothold without a broad rewrite. diff --git a/INFRASTRUCTURE.md b/INFRASTRUCTURE.md new file mode 100644 index 0000000000..018c6b1f61 --- /dev/null +++ b/INFRASTRUCTURE.md @@ -0,0 +1,103 @@ +# OpenWork Infrastructure Principles + +OpenWork is an experience layer. `opencode` is the engine. This document defines how infrastructure is built so every component is usable on its own, composable as a sidecar, and easy to automate. + +## Core Principles + +1. CLI-first, always + +* Every infrastructure component must be runnable via a single CLI command. +* The OpenWork UI may wrap these, but never replace or lock them out. + +2. Unix-like interfaces + +* Prefer simple, composable boundaries: JSON over stdout, flags, and env vars. 
+
+* Favor readable logs and predictable exit codes.
+
+3. Sidecar-composable
+
+* Any component must run as a sidecar without special casing.
+* The UI should connect to the same surface area the CLI exposes.
+
+4. Clear boundaries
+
+* OpenCode remains the engine; OpenWork adds a thin config + UX layer.
+* When OpenCode exposes a stable API, use it instead of re-implementing.
+
+5. Local-first, graceful degradation
+
+* Default to local execution.
+* Hosted cloud is a first-class option, not a separate product.
+* If a sidecar is missing or offline, the UI falls back to read-only or explicit user guidance.
+
+6. Portable configuration
+
+* Use config files + env vars; avoid hidden state.
+* Keep credentials outside git and outside the repo.
+
+7. Observability by default
+
+* Provide health endpoints and structured logs.
+* Record audit events for every config mutation.
+
+8. Security + scoping
+
+* All filesystem access is scoped to explicit workspace roots.
+* Writes require explicit host approval when requested remotely.
+
+9. Debuggable by agents
+ Agents (like you?) make tool calls. Tool calls can do a variety of things, from using Chrome
+ to calling curl, using the CLI, using bun, and making scripts.
+
+You're not afraid to run the program on your OS, but to benefit from it you need to design the architecture
+so these things are callable.
+
+E.g. it is very hard to call things from the desktop app (you do not have a lot of control).
+
+But what you can do is:
+
+* run the underlying CLIs (since they are implemented as sidecars)
+* run against real opencode values
+* use bash to test endpoints of these various servers/etc
+* if needed, don't hesitate to ask for credentials, e.g. to test Telegram or other similar flows
+ - you should be able to test 99% of the flow on your own
+
+## Applied to Current Components
+
+### opencode Engine
+
+* Always usable via `opencode` CLI.
+* OpenWork never replaces the CLI; it only connects to the engine.
+ +### OpenWork Server + +* Runs standalone via `openwork-server` CLI. +* Provides filesystem-backed config surfaces (skills, plugins, MCP, commands). +* Sidecar lifecycle is described in `packages/app/pr/openwork-server.md`. +* Can also be consumed as a hosted OpenWork Cloud control surface for remote worker lifecycle. + +### OpenWork Cloud Control Plane + +* Hosted deployment of OpenWork server capabilities for worker provisioning and remote connect. +* Must preserve the same user-level contract as self-hosted paths: + - launch worker + - get connect credentials (URL + token) + - connect via `Add worker` -> `Connect remote` +* Should not require a separate mental model for users moving between local and hosted modes. + +### OpenCode Router + +* Runs standalone via `opencode-router` CLI. +* Must be able to use OpenWork server for config and approvals. + +## Non-goals + +* Replacing OpenCode primitives with custom abstractions. +* Forcing cloud-only lock-in (self-hosted desktop/CLI paths must remain valid). + +## References + +* `VISION.md` +* `PRINCIPLES.md` +* `ARCHITECTURE.md` +* `packages/app/pr/openwork-server.md` diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..546b09b5df --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2026-present Different AI, Inc. + +Portions of this software are licensed as follows: + +* All content that resides under the /ee directory of this repository is licensed under the license defined in "ee/LICENSE" (Fair Source License). +* All third party components incorporated into the OpenWork Software are licensed under the original license provided by the owner of the applicable component. +* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below. 
+
+MIT License
+
+Copyright (c) 2026 Different AI
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. diff --git a/PRINCIPLES.md b/PRINCIPLES.md new file mode 100644 index 0000000000..6bcb80d845 --- /dev/null +++ b/PRINCIPLES.md @@ -0,0 +1,31 @@ +# OpenWork Principles
+
+## Decision framework for adding new features or fixing bugs:
+
+- is it easy to test? how can we make it easier? (e.g. we can use the chrome mcp and pnpm:dev to test the UI and take screenshots)
+- is there an existing opencode equivalent for this feature? (we should use it if we can) if not, how does it map to a better user experience for bob *or* susan (see below)
+- if it's a bug: what were you testing? what were you trying to achieve? what did you observe? We can't move on before having a core understanding.
+
+## Constraints
+
+- Work with **only the folders the user authorizes**.
+- Treat **plugins + skills + commands + mcp** as the primary extensibility system.
These are native to OpenCode and OpenWork must be a thin layer on top of them. They're mostly fs based.
+
+## Principles
+
+- **Parity**: UI actions map to OpenCode server APIs.
+- **Server-consumption first**: OpenWork app consumes OpenWork server surfaces (desktop-hosted, orchestrator-hosted, or cloud-hosted) instead of inventing parallel behavior.
+- **Transparency**: plans, steps, tool calls, permissions are visible.
+- **Least privilege**: only user-authorized folders + explicit approvals.
+- **Prompt is the workflow**: product logic lives in prompts, rules, and skills.
+- **Graceful degradation**: if access is missing, guide the user.
+- **Progressive disclosure by default**: non-technical users should see clear primary actions first; IDs/URLs/diagnostics stay behind explicit "manual" or "advanced" sections.
+- **Cloud + self-hosted consistency**: the same connect mental model (`Add worker` -> `Connect remote`) should work regardless of where the server runs.
+
+## Security & Privacy
+
+- Local-first by default.
+- No secrets in git.
+- Use OS keychain for credentials.
+- Clear, explicit permissions.
+- Exportable audit logs. diff --git a/PRODUCT.md b/PRODUCT.md new file mode 100644 index 0000000000..610040c5a0 --- /dev/null +++ b/PRODUCT.md @@ -0,0 +1,60 @@ +## Product
+
+OpenWork helps individuals create, consume, and maintain their agentic workflows.
+
+OpenWork helps companies share their agentic workflows and provision their entire team.
+
+The chat interface is where people consume the workflows.
+
+Interfaces for consuming workflows:
+- Desktop app
+- Slack
+- Telegram
+
+What is an "agentic workflow":
+- LLM providers
+- Skills
+- MCP
+- Agents
+- Plugins
+- Tools
+- Background Agents
+
+Where are workflows created:
+- Desktop app (using slash commands like `/create-skills`)
+- Web App
+- [We need better places for this to happen]
+
+Where are workflows maintained:
+- In OpenWork Cloud (internal name is Den).
+
+Where are workflows hosted:
+- Local Machine
+- Remote via an OpenWork Host (CLI or desktop)
+- Remote on OpenWork Cloud (via Den sandbox workers)
+
+## Current OpenWork Cloud flow
+
+- Users can sign in with the standard web auth providers or accept an org invite through the hosted join flow.
+- Invite signup keeps the invited email fixed, verifies the user by email code, and then drops them into the org join path.
+- Cloud workers are a paid flow: users complete checkout before they can launch hosted workers.
+- After a worker is ready, the user connects from the OpenWork app with `Add a worker` -> `Connect remote`, or opens the generated deep link directly.
+
+## Team distribution
+
+- Organizations can publish shared skill hubs so members discover approved skills from one managed place instead of collecting local-only installs by hand.
+
+## Actors
+Bob, the IT guy, makes the config.
+Susan, the accountant, consumes the config.
+
+Constraints:
+- We use standards where possible
+- We use opencode where possible
+- We stay platform agnostic
+
+
+How to decide if OpenWork should do something:
+- Does it help Bob share config more easily?
+- Does it help Susan consume shared workflows more easily?
+- Is this something that is coding specific? diff --git a/README.md b/README.md index 0c095c8b14..0bbe3325cc 100644 --- a/README.md +++ b/README.md @@ -1,104 +1,251 @@ -# OpenWork
+> OpenWork is the open source alternative to Claude Cowork/Codex (desktop app).
-OpenWork is an **extensible, open-source “Claude Work” style system for knowledge workers**.
-It’s a native desktop app (Tauri) that runs **OpenCode** under the hood, but presents it as a clean, guided workflow:
-- pick a workspace
-- start a run
-- watch progress + plan updates
-- approve permissions when needed
-- reuse what works (templates + skills)
+## Core Philosophy
-The goal: make “agentic work” feel like a product, not a terminal.
+- Local-first, cloud-ready: OpenWork runs on your machine in one click.
Send a message instantly. +- Composable: desktop app, Slack/Telegram connector, or server. Use what fits, no lock-in. +- Ejectable: OpenWork is powered by OpenCode, so everything OpenCode can do works in OpenWork, even without a UI yet. +- Sharing is caring: start solo on localhost, then explicitly opt into remote sharing when you need it. + +

+ OpenWork demo +

+ +OpenWork is designed around the idea that you can easily ship your agentic workflows for your team as a repeatable, productized process. + +> [!TIP] +> **Looking for an [Enterprise Plan](https://openworklabs.com/enterprise)?** [Speak with our Sales Team today](https://calendar.app.google/86QpCENvhfEzDFLu5) +> +> Get enhanced capabilities including feature prioritization, SSO, SLA support, LTS versions, and more. + +## Alternate UIs +- **OpenWork Orchestrator (CLI host)**: run OpenCode + OpenWork server without the desktop UI. + - install: `npm install -g openwork-orchestrator` + - run: `openwork start --workspace /path/to/workspace --approval auto` + - docs: [apps/orchestrator/README.md](./apps/orchestrator/README.md) + +## Quick start + +Download the desktop app from [openworklabs.com/download](https://openworklabs.com/download), grab the latest [GitHub release](https://github.com/different-ai/openwork/releases), or install from source below. + +- macOS and Linux downloads are available directly. +- Windows access is currently handled through the paid support plan on [openworklabs.com/pricing#windows-support](https://openworklabs.com/pricing#windows-support). +- Hosted OpenWork Cloud workers are launched from the web app after checkout, then connected from the desktop app via `Add a worker` -> `Connect remote`. ## Why -Knowledge workers don’t want to learn a CLI, fight config sprawl, or rebuild the same workflows in every repo. +Current CLI and GUIs for opencode are anchored around developers. That means a focus on file diffs, tool names, and hard to extend capabilities without relying on exposing some form of cli. + OpenWork is designed to be: -- **Extensible**: skills and workflows are installable modules. + +- **Extensible**: skill and opencode plugins are installable modules. - **Auditable**: show what happened, when, and why. -- **Permissioned**: explicit user approval for risky actions. -- **Portable**: keep logic in prompts/skills, not bespoke code. 
+- **Permissioned**: access to privileged flows. +- **Local/Remote**: OpenWork works locally as well as can connect to remote servers. -## What’s Included (v0.1) +## What’s Included -- **Host mode**: start `opencode serve` locally in a chosen folder. +- **Host mode**: runs opencode locally on your computer - **Client mode**: connect to an existing OpenCode server by URL. - **Sessions**: create/select sessions and send prompts. - **Live streaming**: SSE `/event` subscription for realtime updates. - **Execution plan**: render OpenCode todos as a timeline. - **Permissions**: surface permission requests and reply (allow once / always / deny). - **Templates**: save and re-run common workflows (stored locally). +- **Debug exports**: copy or export the runtime debug report and developer log stream from Settings -> Debug when you need to file a bug. - **Skills manager**: - - list installed `.opencode/skill` folders - - install from OpenPackage (`opkg install ...`) - - import a local skill folder into `.opencode/skill/` + - list installed `.opencode/skills` folders + - import a local skill folder into `.opencode/skills/` + +## Skill Manager + +image + +## Works on local computer or servers + +Screenshot 2026-01-13 at 7 05 16 PM ## Quick Start ### Requirements - Node.js + `pnpm` -- Rust toolchain (for Tauri): `cargo`, `rustc` +- Rust toolchain (for Tauri): install via `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` +- Tauri CLI: `cargo install tauri-cli` - OpenCode CLI installed and available on PATH: `opencode` +### Local Dev Prerequisites (Desktop) + +Before running `pnpm dev`, ensure these are installed and active in your shell: + +- Node + pnpm (repo uses `pnpm@10.27.0`) +- **Bun 1.3.9+** (`bun --version`) +- Rust toolchain (for Tauri), with Cargo from current `rustup` stable (supports `Cargo.lock` v4) +- Xcode Command Line Tools (macOS) +- On Linux, WebKitGTK 4.1 development packages so `pkg-config` can resolve `webkit2gtk-4.1` and 
`javascriptcoregtk-4.1` + +### One-minute sanity check + +Run from repo root: + +```bash +git checkout dev +git pull --ff-only origin dev +pnpm install --frozen-lockfile + +which bun +bun --version +pnpm --filter @openwork/desktop exec tauri --version +``` + ### Install ```bash pnpm install ``` +OpenWork now lives in `apps/app` (UI) and `apps/desktop` (desktop shell). + ### Run (Desktop) ```bash pnpm dev ``` +`pnpm dev` now enables `OPENWORK_DEV_MODE=1` automatically, so desktop dev uses an isolated OpenCode state instead of your personal global config/auth/data. + ### Run (Web UI only) ```bash -pnpm dev:web +pnpm dev:ui +``` + +All repo `dev` entrypoints now opt into the same dev-mode isolation so local testing uses the OpenWork-managed OpenCode state consistently. + +### Arch Users: + +```bash +sudo pacman -S --needed webkit2gtk-4.1 +curl -fsSL https://opencode.ai/install | bash -s -- --version "$(node -e "const fs=require('fs'); const parsed=JSON.parse(fs.readFileSync('constants.json','utf8')); process.stdout.write(String(parsed.opencodeVersion||'').trim().replace(/^v/,''));")" --no-modify-path ``` ## Architecture (high-level) -- In **Host mode**, OpenWork spawns: - - `opencode serve --hostname 127.0.0.1 --port ` - - with your selected project folder as the process working directory. +- In **Host mode**, OpenWork runs a local host stack and connects the UI to it. + - Default runtime: `openwork` (installed from `openwork-orchestrator`), which orchestrates `opencode`, `openwork-server`, and optionally `opencode-router`. + - Fallback runtime: `direct`, where the desktop app spawns `opencode serve --hostname 127.0.0.1 --port ` directly. + +When you select a project folder, OpenWork runs the host stack locally using that folder and connects the desktop UI. +This lets you run agentic workflows, send prompts, and see progress entirely on your machine without a remote server. 
+ - The UI uses `@opencode-ai/sdk/v2/client` to: - connect to the server - list/create sessions - send prompts - - subscribe to SSE events + - subscribe to SSE events(Server-Sent Events are used to stream real-time updates from the server to the UI.) - read todos and permission requests ## Folder Picker The folder picker uses the Tauri dialog plugin. Capability permissions are defined in: -- `src-tauri/capabilities/default.json` -## OpenPackage Notes +- `apps/desktop/src-tauri/capabilities/default.json` -If `opkg` is not installed globally, OpenWork falls back to: +## OpenCode Plugins -```bash -pnpm dlx opkg install +Plugins are the **native** way to extend OpenCode. OpenWork now manages them from the Skills tab by +reading and writing `opencode.json`. + +- **Project scope**: `/opencode.json` +- **Global scope**: `~/.config/opencode/opencode.json` (or `$XDG_CONFIG_HOME/opencode/opencode.json`) + +You can still edit `opencode.json` manually; OpenWork uses the same format as the OpenCode CLI: + +```json +{ + "$schema": "https://opencode.ai/config.json", + "plugin": ["opencode-wakatime"] +} ``` ## Useful Commands ```bash +pnpm dev +pnpm dev:ui pnpm typecheck -pnpm build:web +pnpm build +pnpm build:ui pnpm test:e2e ``` +## Troubleshooting + +If you need to report a desktop or session bug, open Settings -> Debug and export both the runtime debug report and developer logs before filing an issue. + +### Linux / Wayland (Hyprland) + +If OpenWork crashes on launch with WebKitGTK errors like `Failed to create GBM buffer`, disable dmabuf or compositing before launch. Try one of the following environment flags. + +```bash +WEBKIT_DISABLE_DMABUF_RENDERER=1 openwork +``` + +```bash +WEBKIT_DISABLE_COMPOSITING_MODE=1 openwork +``` + ## Security Notes - OpenWork hides model reasoning and sensitive tool metadata by default. - Host mode binds to `127.0.0.1` by default. 
+## Contributing + +- Review `AGENTS.md` plus `VISION.md`, `PRINCIPLES.md`, `PRODUCT.md`, and `ARCHITECTURE.md` to understand the product goals before making changes. +- Ensure Node.js, `pnpm`, the Rust toolchain, and `opencode` are installed before working inside the repo. +- Run `pnpm install` once per checkout, then verify your change with `pnpm typecheck` plus `pnpm test:e2e` (or the targeted subset of scripts) before opening a PR. +- Use `.github/pull_request_template.md` when opening PRs and include exact commands, outcomes, manual verification steps, and evidence. +- If CI fails, classify failures in the PR body as either code-related regressions or external/environment/auth blockers. +- Add new PRDs to `apps/app/pr/.md` following the `.opencode/skills/prd-conventions/SKILL.md` conventions described in `AGENTS.md`. + +Community docs: + +- `CODE_OF_CONDUCT.md` +- `SECURITY.md` +- `SUPPORT.md` +- `TRIAGE.md` + +First contribution checklist: + +- [ ] Run `pnpm install` and baseline verification commands. +- [ ] Confirm your change has a clear issue link and scope. +- [ ] Add/update tests for behavioral changes. +- [ ] Include commands run and outcomes in your PR. +- [ ] Add screenshots/video for user-facing flow changes. + +## Supported Languages + +Translated READMEs: [`translated_readmes/`](./translated_readmes/README.md), available in English, 简体中文, 繁體中文, 日本語. + +The App is available in the following languages: +- English (`en`) +- French (`fr`) +- Spanish (`es`) +- Catalan (`ca`) +- Brazilian Portuguese (`pt-BR`) +- Japanese (`ja`) +- Simplified Chinese (`zh`) +- Thai (`th`) +- Vietnamese (`vi`) +- Russian (`ru`) + +## For Teams & Businesses + +Interested in using OpenWork in your organization? We'd love to hear from you — reach out at [ben@openworklabs.com](mailto:ben@openworklabs.com) to chat about your use case. + ## License -TBD. +MIT — see `LICENSE`. 
diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 0000000000..135da4bbd5 --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,64 @@ +# Release checklist + +OpenWork releases should be deterministic, easy to reproduce, and fully verifiable with CLI tooling. + +## Preflight + +- Sync the default branch (currently `dev`). +- Run `pnpm release:review` and fix any mismatches. +- If you are building sidecar assets, set `SOURCE_DATE_EPOCH` to the tag timestamp for deterministic manifests. + +## App release (desktop) + +1. Bump versions (app + desktop + Tauri + Cargo): + - `pnpm bump:patch` or `pnpm bump:minor` or `pnpm bump:major` +2. Re-run `pnpm release:review`. +3. Build sidecars for the desktop bundle: + - `pnpm --filter @different-ai/openwork prepare:sidecar` +4. Commit the version bump. +5. Tag and push: + - `git tag vX.Y.Z` + - `git push origin vX.Y.Z` + +## openwork-orchestrator (npm + sidecars) + +1. Bump versions (includes `packages/orchestrator/package.json`): + - `pnpm bump:patch` or `pnpm bump:minor` or `pnpm bump:major` +2. Build sidecar assets and manifest: + - `pnpm --filter openwork-orchestrator build:sidecars` +3. Create the GitHub release for sidecars: + - `gh release create openwork-orchestrator-vX.Y.Z packages/orchestrator/dist/sidecars/* --repo different-ai/openwork` +4. Publish the package: + - `pnpm --filter openwork-orchestrator publish --access public` + +## openwork-server + opencode-router (if version changed) + +- `pnpm --filter openwork-server publish --access public` +- `pnpm --filter opencode-router publish --access public` + +## Verification + +- `openwork start --workspace /path/to/workspace --check --check-events` +- `gh run list --repo different-ai/openwork --workflow "Release App" --limit 5` +- `gh release view vX.Y.Z --repo different-ai/openwork` + +Use `pnpm release:review --json` when automating these checks in scripts or agents. 
+ +## AUR + +`Release App` publishes the Arch AUR package automatically after the Linux `.deb` asset is uploaded. + +For local AMD64 Arch builds without Docker, see `packaging/aur/README.md`. + +Required repo config: + +- GitHub Actions secret: `AUR_SSH_PRIVATE_KEY` (SSH key with push access to the AUR package repo) +- Optional repo variable: `AUR_REPO` (defaults to `openwork`) + +## npm publishing + +If you want `Release App` to publish `openwork-orchestrator`, `openwork-server`, and `opencode-router` to npm, configure: + +- GitHub Actions secret: `NPM_TOKEN` (npm automation token) + +If `NPM_TOKEN` is not set, the npm publish job is skipped. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..b8593f097e --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,33 @@ +# Security Policy + +## Supported versions + +OpenWork is under active development and we prioritize fixes on the latest release and +the current `dev` branch. + +## Reporting a vulnerability + +Please do not open public GitHub issues for security vulnerabilities. + +Instead, report vulnerabilities privately to: + +- Email: `ben@openworklabs.com` +- Subject: `[OpenWork security] ` + +Please include: + +- A clear description of the issue +- Reproduction steps or proof of concept +- Impact assessment +- Suggested remediation (if known) + +## Response expectations + +- We will acknowledge receipt within 3 business days. +- We will provide an initial triage status within 7 business days. +- We will share remediation or mitigation guidance as soon as available. + +## Disclosure guidance + +Please keep details private until a fix or mitigation is available and maintainers +confirm public disclosure timing. diff --git a/STATS.md b/STATS.md new file mode 100644 index 0000000000..0f82d3f162 --- /dev/null +++ b/STATS.md @@ -0,0 +1,110 @@ +# Download Stats + +Legacy cumulative release-asset totals. For classified v2 buckets, see `STATS_V2.md`. 
+ +| Date | GitHub Downloads | Total | +|------|------------------|-------| +| 2026-01-24 | 15,879 (+15,879) | 15,879 (+15,879) | +| 2026-01-24 | 17,254 (+1,375) | 17,254 (+1,375) | +| 2026-01-24 | 17,254 (+0) | 17,254 (+0) | +| 2026-01-26 | 19,869 (+2,615) | 19,869 (+2,615) | +| 2026-01-27 | 23,489 (+3,620) | 23,489 (+3,620) | +| 2026-01-28 | 25,238 (+1,749) | 25,238 (+1,749) | +| 2026-01-29 | 26,939 (+1,701) | 26,939 (+1,701) | +| 2026-01-30 | 28,718 (+1,779) | 28,718 (+1,779) | +| 2026-01-31 | 30,070 (+1,352) | 30,070 (+1,352) | +| 2026-02-01 | 31,383 (+1,313) | 31,383 (+1,313) | +| 2026-02-02 | 33,206 (+1,823) | 33,206 (+1,823) | +| 2026-02-03 | 35,064 (+1,858) | 35,064 (+1,858) | +| 2026-02-04 | 38,330 (+3,266) | 38,330 (+3,266) | +| 2026-02-05 | 41,657 (+3,327) | 41,657 (+3,327) | +| 2026-02-06 | 44,561 (+2,904) | 44,561 (+2,904) | +| 2026-02-07 | 47,783 (+3,222) | 47,783 (+3,222) | +| 2026-02-08 | 51,070 (+3,287) | 51,070 (+3,287) | +| 2026-02-09 | 54,793 (+3,723) | 54,793 (+3,723) | +| 2026-02-10 | 58,605 (+3,812) | 58,605 (+3,812) | +| 2026-02-11 | 62,536 (+3,931) | 62,536 (+3,931) | +| 2026-02-12 | 66,149 (+3,613) | 66,149 (+3,613) | +| 2026-02-13 | 69,528 (+3,379) | 69,528 (+3,379) | +| 2026-02-14 | 72,204 (+2,676) | 72,204 (+2,676) | +| 2026-02-15 | 74,561 (+2,357) | 74,561 (+2,357) | +| 2026-02-16 | 77,144 (+2,583) | 77,144 (+2,583) | +| 2026-02-17 | 79,817 (+2,673) | 79,817 (+2,673) | +| 2026-02-18 | 83,020 (+3,203) | 83,020 (+3,203) | +| 2026-02-19 | 86,687 (+3,667) | 86,687 (+3,667) | +| 2026-02-20 | 90,491 (+3,804) | 90,491 (+3,804) | +| 2026-02-21 | 94,409 (+3,918) | 94,409 (+3,918) | +| 2026-02-22 | 99,076 (+4,667) | 99,076 (+4,667) | +| 2026-02-23 | 103,810 (+4,734) | 103,810 (+4,734) | +| 2026-02-24 | 108,788 (+4,978) | 108,788 (+4,978) | +| 2026-02-25 | 113,976 (+5,188) | 113,976 (+5,188) | +| 2026-02-26 | 119,570 (+5,594) | 119,570 (+5,594) | +| 2026-02-27 | 125,213 (+5,643) | 125,213 (+5,643) | +| 2026-02-28 | 130,766 (+5,553) | 130,766 
(+5,553) | +| 2026-03-01 | 133,877 (+3,111) | 133,877 (+3,111) | +| 2026-03-02 | 139,092 (+5,215) | 139,092 (+5,215) | +| 2026-03-03 | 144,346 (+5,254) | 144,346 (+5,254) | +| 2026-03-04 | 148,772 (+4,426) | 148,772 (+4,426) | +| 2026-03-05 | 152,105 (+3,333) | 152,105 (+3,333) | +| 2026-03-06 | 155,629 (+3,524) | 155,629 (+3,524) | +| 2026-03-07 | 157,784 (+2,155) | 157,784 (+2,155) | +| 2026-03-07 | 158,107 (+323) | 158,107 (+323) | +| 2026-03-08 | 159,616 (+1,509) | 159,616 (+1,509) | +| 2026-03-09 | 162,103 (+2,487) | 162,103 (+2,487) | +| 2026-03-10 | 165,406 (+3,303) | 165,406 (+3,303) | +| 2026-03-11 | 168,897 (+3,491) | 168,897 (+3,491) | +| 2026-03-12 | 172,707 (+3,810) | 172,707 (+3,810) | +| 2026-03-13 | 176,511 (+3,804) | 176,511 (+3,804) | +| 2026-03-14 | 177,484 (+973) | 177,484 (+973) | +| 2026-03-15 | 178,354 (+870) | 178,354 (+870) | +| 2026-03-16 | 179,050 (+696) | 179,050 (+696) | +| 2026-03-17 | 180,297 (+1,247) | 180,297 (+1,247) | +| 2026-03-18 | 181,354 (+1,057) | 181,354 (+1,057) | +| 2026-03-19 | 182,208 (+854) | 182,208 (+854) | +| 2026-03-20 | 183,136 (+928) | 183,136 (+928) | +| 2026-03-21 | 184,156 (+1,020) | 184,156 (+1,020) | +| 2026-03-22 | 184,744 (+588) | 184,744 (+588) | +| 2026-03-23 | 185,371 (+627) | 185,371 (+627) | +| 2026-03-24 | 186,649 (+1,278) | 186,649 (+1,278) | +| 2026-03-25 | 187,746 (+1,097) | 187,746 (+1,097) | +| 2026-03-26 | 193,858 (+6,112) | 193,858 (+6,112) | +| 2026-03-27 | 200,722 (+6,864) | 200,722 (+6,864) | +| 2026-03-28 | 206,754 (+6,032) | 206,754 (+6,032) | +| 2026-03-29 | 211,210 (+4,456) | 211,210 (+4,456) | +| 2026-03-30 | 217,507 (+6,297) | 217,507 (+6,297) | +| 2026-03-31 | 225,120 (+7,613) | 225,120 (+7,613) | +| 2026-04-01 | 232,042 (+6,922) | 232,042 (+6,922) | +| 2026-04-02 | 251,721 (+19,679) | 251,721 (+19,679) | +| 2026-04-03 | 300,714 (+48,993) | 300,714 (+48,993) | +| 2026-04-04 | 345,411 (+44,697) | 345,411 (+44,697) | +| 2026-04-05 | 388,231 (+42,820) | 388,231 (+42,820) | +| 2026-04-06 
| 428,311 (+40,080) | 428,311 (+40,080) | +| 2026-04-07 | 471,893 (+43,582) | 471,893 (+43,582) | +| 2026-04-08 | 517,274 (+45,381) | 517,274 (+45,381) | +| 2026-04-09 | 560,586 (+43,312) | 560,586 (+43,312) | +| 2026-04-10 | 602,133 (+41,547) | 602,133 (+41,547) | +| 2026-04-11 | 638,258 (+36,125) | 638,258 (+36,125) | +| 2026-04-12 | 671,199 (+32,941) | 671,199 (+32,941) | +| 2026-04-13 | 705,072 (+33,873) | 705,072 (+33,873) | +| 2026-04-14 | 725,846 (+20,774) | 725,846 (+20,774) | +| 2026-04-15 | 731,872 (+6,026) | 731,872 (+6,026) | +| 2026-04-16 | 738,176 (+6,304) | 738,176 (+6,304) | +| 2026-04-17 | 743,397 (+5,221) | 743,397 (+5,221) | +| 2026-04-18 | 747,542 (+4,145) | 747,542 (+4,145) | +| 2026-04-19 | 751,067 (+3,525) | 751,067 (+3,525) | +| 2026-04-20 | 755,765 (+4,698) | 755,765 (+4,698) | +| 2026-04-21 | 762,220 (+6,455) | 762,220 (+6,455) | +| 2026-04-22 | 767,840 (+5,620) | 767,840 (+5,620) | +| 2026-04-23 | 773,380 (+5,540) | 773,380 (+5,540) | +| 2026-04-24 | 778,287 (+4,907) | 778,287 (+4,907) | +| 2026-04-25 | 782,587 (+4,300) | 782,587 (+4,300) | +| 2026-04-26 | 786,791 (+4,204) | 786,791 (+4,204) | +| 2026-04-27 | 791,564 (+4,773) | 791,564 (+4,773) | +| 2026-04-28 | 795,811 (+4,247) | 795,811 (+4,247) | +| 2026-04-29 | 799,679 (+3,868) | 799,679 (+3,868) | +| 2026-04-30 | 805,584 (+5,905) | 805,584 (+5,905) | +| 2026-05-01 | 809,415 (+3,831) | 809,415 (+3,831) | +| 2026-05-02 | 813,107 (+3,692) | 813,107 (+3,692) | +| 2026-05-03 | 816,532 (+3,425) | 816,532 (+3,425) | +| 2026-05-04 | 820,823 (+4,291) | 820,823 (+4,291) | +| 2026-05-05 | 824,968 (+4,145) | 824,968 (+4,145) | diff --git a/STATS_V2.md b/STATS_V2.md new file mode 100644 index 0000000000..a93620053b --- /dev/null +++ b/STATS_V2.md @@ -0,0 +1,66 @@ +# Download Stats V2 + +Classified GitHub release asset snapshots. `Manual installs` counts installer downloads (`.dmg`, `.msi`, `.deb`, `.rpm`). 
`Updater` counts updater artifacts (`latest.json`, macOS updater bundles, updater signatures). `Other` captures signatures, sidecars, and uncategorized assets. + +| Date | Manual Installs | Updater | Other | All Release Assets | +|------|-----------------|---------|-------|--------------------| +| 2026-03-07 | 54,446 (+54,446) | 89,201 (+89,201) | 14,460 (+14,460) | 158,107 (+158,107) | +| 2026-03-08 | 54,727 (+281) | 90,315 (+1,114) | 14,574 (+114) | 159,616 (+1,509) | +| 2026-03-09 | 55,242 (+515) | 92,073 (+1,758) | 14,788 (+214) | 162,103 (+2,487) | +| 2026-03-10 | 56,051 (+809) | 94,325 (+2,252) | 15,030 (+242) | 165,406 (+3,303) | +| 2026-03-11 | 56,914 (+863) | 96,683 (+2,358) | 15,300 (+270) | 168,897 (+3,491) | +| 2026-03-12 | 57,703 (+789) | 99,150 (+2,467) | 15,854 (+554) | 172,707 (+3,810) | +| 2026-03-13 | 58,605 (+902) | 101,229 (+2,079) | 16,677 (+823) | 176,511 (+3,804) | +| 2026-03-14 | 58,838 (+233) | 101,875 (+646) | 16,771 (+94) | 177,484 (+973) | +| 2026-03-15 | 59,168 (+330) | 102,281 (+406) | 16,905 (+134) | 178,354 (+870) | +| 2026-03-16 | 59,363 (+195) | 102,655 (+374) | 17,032 (+127) | 179,050 (+696) | +| 2026-03-17 | 59,631 (+268) | 103,431 (+776) | 17,235 (+203) | 180,297 (+1,247) | +| 2026-03-18 | 59,845 (+214) | 104,136 (+705) | 17,373 (+138) | 181,354 (+1,057) | +| 2026-03-19 | 60,045 (+200) | 104,667 (+531) | 17,496 (+123) | 182,208 (+854) | +| 2026-03-20 | 60,221 (+176) | 105,278 (+611) | 17,637 (+141) | 183,136 (+928) | +| 2026-03-21 | 60,558 (+337) | 105,839 (+561) | 17,759 (+122) | 184,156 (+1,020) | +| 2026-03-22 | 60,687 (+129) | 106,219 (+380) | 17,838 (+79) | 184,744 (+588) | +| 2026-03-23 | 60,848 (+161) | 106,545 (+326) | 17,978 (+140) | 185,371 (+627) | +| 2026-03-24 | 61,247 (+399) | 107,230 (+685) | 18,172 (+194) | 186,649 (+1,278) | +| 2026-03-25 | 61,477 (+230) | 107,957 (+727) | 18,312 (+140) | 187,746 (+1,097) | +| 2026-03-26 | 63,032 (+1,555) | 112,084 (+4,127) | 18,742 (+430) | 193,858 (+6,112) | +| 2026-03-27 | 
64,244 (+1,212) | 117,236 (+5,152) | 19,242 (+500) | 200,722 (+6,864) | +| 2026-03-28 | 65,441 (+1,197) | 121,574 (+4,338) | 19,739 (+497) | 206,754 (+6,032) | +| 2026-03-29 | 66,202 (+761) | 125,041 (+3,467) | 19,967 (+228) | 211,210 (+4,456) | +| 2026-03-30 | 67,249 (+1,047) | 129,987 (+4,946) | 20,271 (+304) | 217,507 (+6,297) | +| 2026-03-31 | 68,732 (+1,483) | 135,648 (+5,661) | 20,740 (+469) | 225,120 (+7,613) | +| 2026-04-01 | 69,871 (+1,139) | 140,959 (+5,311) | 21,212 (+472) | 232,042 (+6,922) | +| 2026-04-02 | 70,782 (+911) | 159,313 (+18,354) | 21,626 (+414) | 251,721 (+19,679) | +| 2026-04-03 | 71,365 (+583) | 207,310 (+47,997) | 22,039 (+413) | 300,714 (+48,993) | +| 2026-04-04 | 71,953 (+588) | 251,008 (+43,698) | 22,450 (+411) | 345,411 (+44,697) | +| 2026-04-05 | 72,498 (+545) | 292,876 (+41,868) | 22,857 (+407) | 388,231 (+42,820) | +| 2026-04-06 | 73,191 (+693) | 331,794 (+38,918) | 23,326 (+469) | 428,311 (+40,080) | +| 2026-04-07 | 73,774 (+583) | 374,167 (+42,373) | 23,952 (+626) | 471,893 (+43,582) | +| 2026-04-08 | 74,644 (+870) | 417,934 (+43,767) | 24,696 (+744) | 517,274 (+45,381) | +| 2026-04-09 | 75,240 (+596) | 460,144 (+42,210) | 25,202 (+506) | 560,586 (+43,312) | +| 2026-04-10 | 75,755 (+515) | 500,754 (+40,610) | 25,624 (+422) | 602,133 (+41,547) | +| 2026-04-11 | 76,295 (+540) | 535,996 (+35,242) | 25,967 (+343) | 638,258 (+36,125) | +| 2026-04-12 | 76,990 (+695) | 567,970 (+31,974) | 26,239 (+272) | 671,199 (+32,941) | +| 2026-04-13 | 77,567 (+577) | 600,843 (+32,873) | 26,662 (+423) | 705,072 (+33,873) | +| 2026-04-14 | 78,520 (+953) | 620,193 (+19,350) | 27,133 (+471) | 725,846 (+20,774) | +| 2026-04-15 | 79,422 (+902) | 624,874 (+4,681) | 27,576 (+443) | 731,872 (+6,026) | +| 2026-04-16 | 80,356 (+934) | 629,753 (+4,879) | 28,067 (+491) | 738,176 (+6,304) | +| 2026-04-17 | 81,107 (+751) | 633,807 (+4,054) | 28,483 (+416) | 743,397 (+5,221) | +| 2026-04-18 | 81,864 (+757) | 636,892 (+3,085) | 28,786 (+303) | 747,542 (+4,145) | 
+| 2026-04-19 | 82,486 (+622) | 639,536 (+2,644) | 29,045 (+259) | 751,067 (+3,525) | +| 2026-04-20 | 83,222 (+736) | 643,271 (+3,735) | 29,272 (+227) | 755,765 (+4,698) | +| 2026-04-21 | 84,271 (+1,049) | 648,249 (+4,978) | 29,700 (+428) | 762,220 (+6,455) | +| 2026-04-22 | 85,162 (+891) | 652,627 (+4,378) | 30,051 (+351) | 767,840 (+5,620) | +| 2026-04-23 | 86,133 (+971) | 656,777 (+4,150) | 30,470 (+419) | 773,380 (+5,540) | +| 2026-04-24 | 86,915 (+782) | 660,538 (+3,761) | 30,834 (+364) | 778,287 (+4,907) | +| 2026-04-25 | 87,706 (+791) | 663,737 (+3,199) | 31,144 (+310) | 782,587 (+4,300) | +| 2026-04-26 | 88,514 (+808) | 666,704 (+2,967) | 31,573 (+429) | 786,791 (+4,204) | +| 2026-04-27 | 89,347 (+833) | 670,293 (+3,589) | 31,924 (+351) | 791,564 (+4,773) | +| 2026-04-28 | 90,158 (+811) | 672,622 (+2,329) | 33,031 (+1,107) | 795,811 (+4,247) | +| 2026-04-29 | 91,297 (+1,139) | 674,416 (+1,794) | 33,966 (+935) | 799,679 (+3,868) | +| 2026-04-30 | 92,816 (+1,519) | 677,341 (+2,925) | 35,427 (+1,461) | 805,584 (+5,905) | +| 2026-05-01 | 93,823 (+1,007) | 678,963 (+1,622) | 36,629 (+1,202) | 809,415 (+3,831) | +| 2026-05-02 | 94,509 (+686) | 680,471 (+1,508) | 38,127 (+1,498) | 813,107 (+3,692) | +| 2026-05-03 | 95,237 (+728) | 681,632 (+1,161) | 39,663 (+1,536) | 816,532 (+3,425) | +| 2026-05-04 | 96,444 (+1,207) | 683,102 (+1,470) | 41,277 (+1,614) | 820,823 (+4,291) | +| 2026-05-05 | 97,232 (+788) | 684,504 (+1,402) | 43,232 (+1,955) | 824,968 (+4,145) | diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 0000000000..37b51dd6ef --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,23 @@ +# Support + +## Where to ask for help + +Use the right channel to get faster help: + +- **Questions / usage help**: open a GitHub issue and mark it as a question. +- **Bug reports**: use the Bug issue template. +- **Feature requests**: use the Feature issue template. +- **Security reports**: follow `SECURITY.md` and report privately. 
+ +## Before opening an issue + +- Search existing issues to avoid duplicates. +- Include exact OpenWork/OpenCode versions, OS, and reproduction steps. +- For desktop, worker, or session bugs, open Settings -> Debug and include both: + - the runtime debug report + - the developer log export +- Add screenshots when they help explain the flow or failure state. + +## Maintainer triage + +Maintainers use the rubric in `TRIAGE.md` to label and route issues. diff --git a/TRANSLATIONS.md b/TRANSLATIONS.md new file mode 100644 index 0000000000..c65a0abddd --- /dev/null +++ b/TRANSLATIONS.md @@ -0,0 +1,27 @@ +# Help Translate OpenWork + +We are actively looking for contributors to translate OpenWork to your own native language. + +## README translations + +Translated README variants live in `translated_readmes/`, so adding a new language only touches the index there plus the supported languages list in the root `README.md`. + +If you want to add a new README language: + +1. Copy `README.md` to a new file like `translated_readmes/README_.md`. +2. Translate the content. +3. Add your new language link to `translated_readmes/README.md`. +4. Add your language name to the supported languages list at the bottom of `README.md`. +5. Open a PR. + +## App UI translations (i18n) + +You can also help translate the app UI via: + +- `apps/app/src/i18n/` + +Currently available app UI locales: English (`en`), Japanese (`ja`), Simplified Chinese (`zh`), Vietnamese (`vi`), Brazilian Portuguese (`pt-BR`), Russian (`ru`). + +Locale files live in `apps/app/src/i18n/locales/`. + +If you are unsure where to start, open an issue and mention the language you want to contribute. diff --git a/TRIAGE.md b/TRIAGE.md new file mode 100644 index 0000000000..ec4981f225 --- /dev/null +++ b/TRIAGE.md @@ -0,0 +1,35 @@ +# Issue Triage Rubric + +This document defines how maintainers triage issues consistently. + +## Core type labels + +- `bug`: behavior does not match expected behavior. 
+- `enhancement`: improvement to existing behavior. +- `question`: support or usage request. + +Apply exactly one core type label whenever possible. + +## Contribution-oriented labels + +- `good first issue`: small, well-scoped, low-risk, and has clear acceptance criteria. +- `help wanted` or `help needed`: maintainers welcome external contributions. +- `needs-info`: issue cannot be actioned until reporter provides missing details. + +## Suggested triage flow + +1. Confirm issue template fields are complete. +2. Add a core type label (`bug`, `enhancement`, or `question`). +3. Add scope/difficulty labels if helpful. +4. Add `needs-info` when reproduction details are missing. +5. Add `good first issue` or `help wanted/help needed` only when issue is ready to build. + +## Closing guidance + +Close as: + +- `duplicate`: issue already tracked elsewhere. +- `invalid`: report is not actionable or not a product issue. +- `wontfix`: acknowledged but not planned. + +When closing, include a short reason and a link to related issue/docs when available. diff --git a/VISION.md b/VISION.md new file mode 100644 index 0000000000..817d5524c0 --- /dev/null +++ b/VISION.md @@ -0,0 +1,31 @@ +# OpenWork Vision + +**Mission:** Make your company feel 1000× more productive. + +**How:** We give AI agents the tools your team already uses and let them learn from your behavior. The more you use OpenWork, the more connected your tools become, the more knowledge accumulates, and the bigger the chunks of work you can automate. + +**Today:** OpenWork is the simplest interface to `opencode` and OpenWork server surfaces. Double-click, pick a folder, and you get three things instantly: + +1. **Zero-friction setup** — your existing opencode configuration just works, no migration needed +2. **Chat access** — WhatsApp and Telegram ready to go (one token, done) +3. 
**Cloud-ready** — every app doubles as a client; connect to hosted workers from anywhere + +Current cloud mental model: + +- OpenWork app is the experience layer. +- OpenWork server is the control/API layer. +- OpenWork worker is the runtime destination. +- Connect flow is intentionally simple: `Add a worker` -> `Connect remote`. + +OpenWork helps users ship agentic workflows to their team. It works on top of opencode (opencode.ai) an agentic coding platform that exposes apis and sdks. We care about maximally using the opencode primitives. And build the thinest possible layer - always favoring opencode apis over custom built ones. + +In other words: +- OpenCode is the **engine**. +- OpenWork is the **experience** : onboarding, safety, permissions, progress, artifacts, and a premium-feeling UI. + +OpenWork competes directly with Anthropic's Cowork conceptually, but stays open, local-first, and standards-based. + +## Non-Goals + +- Replacing OpenCode's CLI/TUI. +- Creating bespoke "magic" capabilities that don't map to OpenCode APIs. diff --git a/app-demo.gif b/app-demo.gif new file mode 100644 index 0000000000..b4618fd1dd Binary files /dev/null and b/app-demo.gif differ diff --git a/apps/app/.env.migration-release b/apps/app/.env.migration-release new file mode 100644 index 0000000000..c47d147dfa --- /dev/null +++ b/apps/app/.env.migration-release @@ -0,0 +1,9 @@ +# Generated by scripts/migration/01-cut-migration-release.mjs. +# Consumed by apps/app Vite build during the v0.12.0 release only. 
+VITE_OPENWORK_MIGRATION_RELEASE=1 +VITE_OPENWORK_MIGRATION_VERSION=0.13.3 +VITE_OPENWORK_MIGRATION_MAC_ARM64_URL=https://github.com/different-ai/openwork/releases/download/v0.13.3/openwork-mac-arm64-0.13.3.zip +VITE_OPENWORK_MIGRATION_MAC_X64_URL=https://github.com/different-ai/openwork/releases/download/v0.13.3/openwork-mac-x64-0.13.3.zip +VITE_OPENWORK_MIGRATION_WINDOWS_X64_URL=https://github.com/different-ai/openwork/releases/download/v0.13.3/openwork-win-x64-0.13.3.exe +VITE_OPENWORK_MIGRATION_LINUX_ARM64_URL=https://github.com/different-ai/openwork/releases/download/v0.13.3/openwork-linux-arm64-0.13.3.AppImage +VITE_OPENWORK_MIGRATION_LINUX_X64_URL=https://github.com/different-ai/openwork/releases/download/v0.13.3/openwork-linux-x86_64-0.13.3.AppImage \ No newline at end of file diff --git a/apps/app/components.json b/apps/app/components.json new file mode 100644 index 0000000000..3781d892e6 --- /dev/null +++ b/apps/app/components.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "base-luma", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "tailwind.config.ts", + "css": "src/app/index.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "iconLibrary": "lucide", + "rtl": true, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "menuColor": "inverted-translucent", + "menuAccent": "subtle", + "registries": {} +} diff --git a/apps/app/index.html b/apps/app/index.html new file mode 100644 index 0000000000..7ac4f5640d --- /dev/null +++ b/apps/app/index.html @@ -0,0 +1,31 @@ + + + + + + OpenWork + + + + + + + + +
+ + + diff --git a/apps/app/package.json b/apps/app/package.json new file mode 100644 index 0000000000..b53f2254f3 --- /dev/null +++ b/apps/app/package.json @@ -0,0 +1,92 @@ +{ + "name": "@openwork/app", + "private": true, + "version": "0.13.3", + "type": "module", + "scripts": { + "dev": "OPENWORK_DEV_MODE=1 vite", + "dev:windows": "vite", + "prebuild": "pnpm --dir ../../packages/ui build", + "build": "vite build", + "dev:web": "OPENWORK_DEV_MODE=1 vite", + "prebuild:web": "pnpm --dir ../../packages/ui build", + "build:web": "vite build", + "preview": "vite preview", + "pretypecheck": "pnpm --dir ../../packages/ui build", + "typecheck": "tsc -p tsconfig.json --noEmit", + "test:health": "node scripts/health.mjs", + "test:mention-send": "node scripts/mention-send.mjs", + "test:sessions": "node scripts/sessions.mjs", + "test:refactor": "pnpm typecheck && pnpm test:health && pnpm test:sessions", + "test:events": "node scripts/events.mjs", + "test:todos": "node scripts/todos.mjs", + "test:permissions": "node scripts/permissions.mjs", + "test:remote-diagnostics": "bun test scripts/remote-workspace-diagnostics.test.ts", + "test:dev-log": "bun scripts/dev-log.ts", + "test:session-error-recovery": "bun scripts/session-error-recovery.ts", + "test:session-scope": "bun scripts/session-scope.ts", + "test:session-switch": "node scripts/session-switch.mjs", + "test:fs-engine": "node scripts/fs-engine.mjs", + "test:local-file-path": "node scripts/local-file-path.mjs", + "test:browser-entry": "node scripts/browser-entry.mjs", + "test:e2e": "pnpm test:local-file-path && node scripts/e2e.mjs && node scripts/session-switch.mjs && node scripts/fs-engine.mjs && node scripts/browser-entry.mjs", + "bump:patch": "node scripts/bump-version.mjs patch", + "bump:minor": "node scripts/bump-version.mjs minor", + "bump:major": "node scripts/bump-version.mjs major", + "bump:set": "node scripts/bump-version.mjs --set" + }, + "dependencies": { + "@ai-sdk/react": "^3.0.148", + "@base-ui/react": 
"^1.4.1", + "@codemirror/commands": "^6.8.0", + "@codemirror/lang-markdown": "^6.3.3", + "@codemirror/language": "^6.11.0", + "@codemirror/state": "^6.5.2", + "@codemirror/view": "^6.38.0", + "@fontsource-variable/geist": "^5.2.8", + "@fontsource-variable/ibm-plex-sans": "^5.2.8", + "@lexical/react": "^0.35.0", + "@opencode-ai/sdk": "^1.4.9", + "@openwork/types": "workspace:*", + "@openwork/ui": "workspace:*", + "@radix-ui/colors": "^3.0.0", + "@tanstack/react-query": "^5.90.3", + "@tanstack/react-virtual": "^3.13.23", + "@tauri-apps/api": "^2.0.0", + "@tauri-apps/plugin-deep-link": "^2.4.7", + "@tauri-apps/plugin-dialog": "~2.6.0", + "@tauri-apps/plugin-http": "~2.5.6", + "@tauri-apps/plugin-opener": "^2.5.3", + "@tauri-apps/plugin-process": "~2.3.1", + "@tauri-apps/plugin-updater": "~2.9.0", + "ai": "^6.0.146", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "fuzzysort": "^3.1.0", + "jsonc-parser": "^3.2.1", + "lexical": "^0.35.0", + "lucide-react": "^0.577.0", + "marked": "^17.0.1", + "react": "^19.1.1", + "react-dom": "^19.1.1", + "react-markdown": "^10.1.0", + "react-router-dom": "^7.14.1", + "remark-gfm": "^4.0.1", + "shadcn": "^4.6.0", + "streamdown": "^2.5.0", + "tailwind-merge": "^3.5.0", + "tw-animate-css": "^1.4.0", + "zustand": "^5.0.12" + }, + "devDependencies": { + "@tailwindcss/vite": "^4.1.18", + "@types/node": "^25.6.0", + "@types/react": "^19.2.2", + "@types/react-dom": "^19.2.2", + "@vitejs/plugin-react": "^5.0.4", + "tailwindcss": "^4.1.18", + "typescript": "^5.6.3", + "vite": "^6.0.1" + }, + "packageManager": "pnpm@10.27.0" +} diff --git a/apps/app/pr/1419-remote-worker-before.png b/apps/app/pr/1419-remote-worker-before.png new file mode 100644 index 0000000000..f78b4a27c1 Binary files /dev/null and b/apps/app/pr/1419-remote-worker-before.png differ diff --git a/apps/app/pr/1419-remote-worker-diagnostics-error.png b/apps/app/pr/1419-remote-worker-diagnostics-error.png new file mode 100644 index 0000000000..2b8a4488f1 Binary files 
/dev/null and b/apps/app/pr/1419-remote-worker-diagnostics-error.png differ diff --git a/apps/app/pr/cloud-settings-dark-after.png b/apps/app/pr/cloud-settings-dark-after.png new file mode 100644 index 0000000000..f7ea309b7f Binary files /dev/null and b/apps/app/pr/cloud-settings-dark-after.png differ diff --git a/apps/app/pr/cloud-settings-dark-before.png b/apps/app/pr/cloud-settings-dark-before.png new file mode 100644 index 0000000000..32d75f6e31 Binary files /dev/null and b/apps/app/pr/cloud-settings-dark-before.png differ diff --git a/apps/app/pr/environment-variables-dark.png b/apps/app/pr/environment-variables-dark.png new file mode 100644 index 0000000000..6375907079 Binary files /dev/null and b/apps/app/pr/environment-variables-dark.png differ diff --git a/apps/app/pr/environment-variables-demo.mp4 b/apps/app/pr/environment-variables-demo.mp4 new file mode 100644 index 0000000000..22b717d8ee Binary files /dev/null and b/apps/app/pr/environment-variables-demo.mp4 differ diff --git a/apps/app/pr/environment-variables.md b/apps/app/pr/environment-variables.md new file mode 100644 index 0000000000..0a9a0518f4 --- /dev/null +++ b/apps/app/pr/environment-variables.md @@ -0,0 +1,189 @@ +# Environment variables UI + +Closes #1436. + +## Why + +Agentic workflows pull in secrets from every direction — LLM provider keys, +ElevenLabs for TTS, Gemini / Nano Banana for images, GitHub tokens for repo +automation, cloud project IDs, corporate proxies and CA certs. Skills and +MCPs in a workspace assume those values exist in the process environment. + +Today the only way to get them there is to edit shell rc files and launch +OpenWork from a terminal, which: + +- **Breaks entirely on Linux GUI launches** (`.bashrc` isn't sourced) — the + concrete user report in #1436. +- **Is invisible friction for non-technical teammates** (the "Susan" persona + called out in `AGENTS.md`). +- Has no masking, no audit trail, no reserved-keys guardrail. 
+ +This PR adds a first-class **Settings → Environment** pane. Credentials go +in once, and every child OpenWork spawns — OpenCode, the OpenWork server, +opencode-router, and any MCP or plugin those three launch — inherits them +via OS process environment. + +Boundaries vs. adjacent features: + +- Not a replacement for OpenCode's native `provider auth` flow, which owns + credentials for LLM providers OpenCode directly supports (stored in + `auth.json`). Users should keep using that for model keys where possible. +- Not a replacement for Den's cloud `LLM Providers` push, which owns + org-wide distribution for signed-in users. On remote workspaces, the pane + shows a read-only hint and does not fetch or display local env values. +- This fills the OSS / local-machine path for every other service skills + and MCPs call into — ElevenLabs, Gemini image APIs, GitHub, Notion, + LangSmith / OTEL exporters, proxy + CA-cert config, and so on. + +## Storage + +Deterministic path, identical across every loader: + +| OS | Path | +| --- | --- | +| Linux / macOS | `~/.config/openwork/env.json` | +| Windows | `%APPDATA%\openwork\env.json` | + +Override via `OPENWORK_ENV_STORE` (mirrors `OPENWORK_TOKEN_STORE`). The file +is written with `0o600` perms on POSIX. + +Shape: + +```json +{ + "schemaVersion": 1, + "updatedAt": 1714000000000, + "variables": [ + { "key": "ANTHROPIC_API_KEY", "value": "sk-ant-...", "updatedAt": 1714000000000 } + ] +} +``` + +## Server + +`EnvService` at `apps/server/src/env-file.ts` — mirrors the `TokenService` +pattern. Four desktop-host-token routes on the OpenWork server, so remote +owner/collaborator/viewer clients and OpenCode tools are structurally unable to +reach them: + +- `GET /env` → `{ items: [{ key, value, updatedAt }] }` (values raw; the UI masks presentationally) +- `GET /env/keys` → `{ keys: [...] }` (names only, used for agent context) +- `PUT /env` → single entry `{ key, value }` or batch `{ entries: [...] 
}` +- `DELETE /env/:key` + +## Shell spawn injection + +Same file, four loaders that agree byte-for-byte on path + reserved-keys policy: + +| Host | File | Integration point | +| --- | --- | --- | +| Tauri (Rust) | `apps/desktop/src-tauri/src/env_file.rs` | merged into 4 spawn sites alongside `bun_env_overrides()` | +| Electron (Node) | inline in `apps/desktop/electron/runtime.mjs` | single `buildChildEnv()` helper | +| Orchestrator (TS) | inline in `apps/orchestrator/src/cli.ts` | single `buildSpawnEnv()` helper | +| Server injection helper | `EnvService.readForInjection()` | reserved by consumers that want to reuse the TS reader | + +Merge order on every host: **user env first, process env / caller env wins.** +This matches the Linux-GUI case (no shell env → user env fills in) and never +lets the user shadow wiring the shell has already set. + +Reserved-keys policy: **any key starting with `OPENWORK_` or `OPENCODE_`** is +refused at write time and stripped at read time. Defends against a +hand-edited `env.json` that tries to shadow auth credentials. + +## UI + +`apps/app/src/react-app/domains/settings/pages/environment-view.tsx` — +self-contained React pane registered as a **global** settings tab (user-level +data, not workspace-scoped). Drops into the existing settings shell with one +line in each of `types.ts`, `settings-page.tsx`, `settings-route.tsx`. + +- Table with masked values (`ab••••yz`), reveal/hide toggle per row, add/edit + modal, delete-with-confirm. +- Client-side key validation mirrors the server (`^[A-Za-z_][A-Za-z0-9_]*$`) + + reserved-prefix check. +- Writes are saved immediately and then marked as pending. The user can click + **Apply changes** to restart the local agents so the new environment is + active without a full app relaunch. +- Remote workspaces show a read-only hint and do not list local env values. 
+ +## Reload semantics + +Env vars are fixed in a process's environment at spawn time, so saving the +file alone cannot update an already-running OpenCode/server/router child. The +pane makes that explicit: after a successful write it shows a pending state and +an **Apply changes** action. Applying restarts the local OpenWork runtime with +the sidecar orchestrator path, preserves the local workspace list and remote +access setting, reconnects the client, then clears the pending state. + +Until pending changes are applied, the app does not inject newly saved key names +into agent system context. That avoids the agent claiming a key is configured +before the running processes can actually read it. + +The same restart boundary is used for delete: removed key names stop appearing +after **Apply changes**. + +## Agent context + +The app never sends secret values to the model. When there are no pending +environment changes, it calls `GET /env/keys` and sends only configured key +names as per-message system context: + +```text +OpenWork environment variables configured: +- EXAMPLE_API_KEY + +Only names are shown; values are secret. Use these names when relevant. +``` + +This is not written into `AGENTS.md`; OpenCode combines it with its normal +instruction sources for that prompt. + +## i18n + +All strings live under the `settings.environment.*` and +`settings.tab_*_environment` namespaces. Full translations in `en.ts`, +`zh.ts`, `ja.ts`. Other locales (`vi`, `pt-BR`, `th`, `fr`, `ca`, `es`) fall +back to English via the existing `t()` runtime (`i18n/index.ts:109-113`), so +nothing ships as raw keys. 
+ +## Tests + +| Layer | File | What | +| --- | --- | --- | +| Server unit | `apps/server/src/env-file.test.ts` | 12 tests — path resolution, validation, reserved keys, perms, round-trip, tampered-file defense | +| Server HTTP e2e | `apps/server/src/env-routes.e2e.test.ts` | 12 tests — auth 401, owner-bearer rejection, CORS PUT preflight, PUT/GET round-trip, key-name-only route, batch PUT, invalid key 400, reserved key 400, DELETE missing/found, restart persistence | +| Tauri Rust unit | `apps/desktop/src-tauri/src/env_file.rs` | 4 tests — missing file, malformed JSON, well-formed load, reserved-key strip | + +Bun picks up `*.e2e.test.ts` automatically — no CI wiring change. + +## Verification ran in latest review + +``` +pnpm --filter openwork-server test # 107 pass, 0 fail +bun test ./src/env-file.test.ts ./src/env-routes.e2e.test.ts # 24 pass, 0 fail +pnpm --filter openwork-server typecheck # clean +pnpm --filter openwork-server build:bin # ok +pnpm --filter openwork-orchestrator typecheck # clean +pnpm --filter @openwork/app typecheck # clean +pnpm build:ui # ok (production Vite; large chunk warning unchanged) +node --check apps/desktop/electron/runtime.mjs # ok +node --check apps/desktop/scripts/prepare-sidecar.mjs # ok +node --check apps/desktop/scripts/tauri-before-dev.mjs # ok +PATH="$HOME/.cargo/bin:$PATH" cargo test env_file # 4 pass, 0 fail +git diff --check # clean +``` + +## Evidence + +- Screenshot: `apps/app/pr/environment-variables-dark.png` +- Demo recording: `apps/app/pr/environment-variables-demo.mp4` + +## Non-goals (follow-ups) + +- OS keychain storage (`PRINCIPLES.md` line 29). JSON + `0o600` matches the + existing `tokens.ts` precedent and keeps the Rust reader trivial (no + keychain FFI). A follow-up PR can migrate values into the keychain while + leaving the JSON file as a manifest of key names + timestamps. +- Per-workspace scoping. The issue asks for user-level; workspace overrides + are a separate feature. 
+- Cloud push for MCP keys — owned by the Den / LLM Providers team. diff --git a/apps/app/pr/first-boot-real-shell.png b/apps/app/pr/first-boot-real-shell.png new file mode 100644 index 0000000000..97db14c594 Binary files /dev/null and b/apps/app/pr/first-boot-real-shell.png differ diff --git a/apps/app/pr/permission-approval-dark.png b/apps/app/pr/permission-approval-dark.png new file mode 100644 index 0000000000..3570f98500 Binary files /dev/null and b/apps/app/pr/permission-approval-dark.png differ diff --git a/apps/app/pr/permission-approval-light.png b/apps/app/pr/permission-approval-light.png new file mode 100644 index 0000000000..9247c1317d Binary files /dev/null and b/apps/app/pr/permission-approval-light.png differ diff --git a/apps/app/pr/session-error-recovery-offline.png b/apps/app/pr/session-error-recovery-offline.png new file mode 100644 index 0000000000..3c23d828d3 Binary files /dev/null and b/apps/app/pr/session-error-recovery-offline.png differ diff --git a/apps/app/pr/session-scroll-loading-polish.png b/apps/app/pr/session-scroll-loading-polish.png new file mode 100644 index 0000000000..1c23026bb6 Binary files /dev/null and b/apps/app/pr/session-scroll-loading-polish.png differ diff --git a/apps/app/public/apple-touch-icon.png b/apps/app/public/apple-touch-icon.png new file mode 100644 index 0000000000..7c05dcc9b9 Binary files /dev/null and b/apps/app/public/apple-touch-icon.png differ diff --git a/apps/app/public/favicon-16x16.png b/apps/app/public/favicon-16x16.png new file mode 100644 index 0000000000..9be1ead979 Binary files /dev/null and b/apps/app/public/favicon-16x16.png differ diff --git a/apps/app/public/favicon-32x32.png b/apps/app/public/favicon-32x32.png new file mode 100644 index 0000000000..8a82bb5e58 Binary files /dev/null and b/apps/app/public/favicon-32x32.png differ diff --git a/apps/app/public/openwork-logo-square.svg b/apps/app/public/openwork-logo-square.svg new file mode 100644 index 0000000000..05e4ca1aab --- /dev/null +++ 
b/apps/app/public/openwork-logo-square.svg @@ -0,0 +1,2 @@ + + diff --git a/apps/app/public/openwork-logo.svg b/apps/app/public/openwork-logo.svg new file mode 100644 index 0000000000..1052922e80 --- /dev/null +++ b/apps/app/public/openwork-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/apps/app/public/openwork-mark.svg b/apps/app/public/openwork-mark.svg new file mode 100644 index 0000000000..e7ebb9153d --- /dev/null +++ b/apps/app/public/openwork-mark.svg @@ -0,0 +1,30 @@ + + + + + + + + diff --git a/scripts/_util.mjs b/apps/app/scripts/_util.mjs similarity index 84% rename from scripts/_util.mjs rename to apps/app/scripts/_util.mjs index 291b0238ce..aa6bcc0926 100644 --- a/scripts/_util.mjs +++ b/apps/app/scripts/_util.mjs @@ -2,14 +2,24 @@ import assert from "node:assert/strict"; import { spawn } from "node:child_process"; import { once } from "node:events"; import net from "node:net"; -import { realpathSync } from "node:fs"; +import { realpathSync, statSync } from "node:fs"; import { createOpencodeClient } from "@opencode-ai/sdk/v2/client"; +function resolveBasicAuthHeader() { + const password = process.env.OPENCODE_SERVER_PASSWORD?.trim() ?? ""; + if (!password) return undefined; + const username = process.env.OPENCODE_SERVER_USERNAME?.trim() || "opencode"; + const encoded = Buffer.from(`${username}:${password}`, "utf8").toString("base64"); + return `Basic ${encoded}`; +} + export function makeClient({ baseUrl, directory }) { + const authorization = resolveBasicAuthHeader(); return createOpencodeClient({ baseUrl, directory, + headers: authorization ? 
{ Authorization: authorization } : undefined, responseStyle: "data", throwOnError: true, }); @@ -153,3 +163,12 @@ export function parseArgs(argv) { } return args; } + +export function canWriteWorkspace(directory) { + try { + const stat = statSync(directory); + return stat && stat.isDirectory(); + } catch { + return false; + } +} diff --git a/apps/app/scripts/browser-entry.mjs b/apps/app/scripts/browser-entry.mjs new file mode 100644 index 0000000000..f174b7981d --- /dev/null +++ b/apps/app/scripts/browser-entry.mjs @@ -0,0 +1,310 @@ +import assert from "node:assert"; +import http from "node:http"; +import os from "node:os"; +import path from "node:path"; +import { mkdtemp, mkdir, readFile, rm, writeFile } from "node:fs/promises"; + +import { findFreePort, makeClient, parseArgs, spawnOpencodeServe, waitForHealthy } from "./_util.mjs"; + +function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function writeSse(res, chunks) { + res.writeHead(200, { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + }); + for (const chunk of chunks) { + res.write(`data: ${JSON.stringify(chunk)}\n\n`); + } + res.write("data: [DONE]\n\n"); + res.end(); +} + +function createTextStream(text) { + return [ + { + id: "chatcmpl-1", + object: "chat.completion.chunk", + choices: [{ index: 0, delta: { role: "assistant" }, finish_reason: null }], + }, + { + id: "chatcmpl-1", + object: "chat.completion.chunk", + choices: [{ index: 0, delta: { content: text }, finish_reason: null }], + }, + { + id: "chatcmpl-1", + object: "chat.completion.chunk", + choices: [{ index: 0, delta: {}, finish_reason: "stop" }], + }, + ]; +} + +function createInvalidToolStream() { + return [ + { + id: "chatcmpl-2", + object: "chat.completion.chunk", + choices: [{ index: 0, delta: { role: "assistant" }, finish_reason: null }], + }, + { + id: "chatcmpl-2", + object: "chat.completion.chunk", + choices: [ + { + index: 0, + delta: { + tool_calls: 
[ + { + index: 0, + id: "call_1", + type: "function", + function: { name: "nonexistent_tool", arguments: "{}" }, + }, + ], + }, + finish_reason: null, + }, + ], + }, + { + id: "chatcmpl-2", + object: "chat.completion.chunk", + choices: [{ index: 0, delta: {}, finish_reason: "tool_calls" }], + }, + ]; +} + +function hasChromeQuickstartPrompt(haystack) { + return ( + haystack.includes("chrome devtools mcp") || + haystack.includes("chrome-devtools_*") || + haystack.includes("control chrome") + ); +} + +const args = parseArgs(process.argv.slice(2)); +const keepTmp = args.get("keep-tmp") === "true"; + +const results = { + ok: true, + steps: [], +}; + +function step(name, fn) { + results.steps.push({ name, status: "running" }); + const idx = results.steps.length - 1; + return Promise.resolve() + .then(fn) + .then((data) => { + results.steps[idx] = { name, status: "ok", data }; + }) + .catch((e) => { + results.ok = false; + results.steps[idx] = { + name, + status: "error", + error: e instanceof Error ? e.message : String(e), + }; + throw e; + }); +} + +let tmpdir; +let mock; +let opencode; +let sawChromeQuickstartPrompt = false; +const mockSockets = new Set(); + +try { + tmpdir = await mkdtemp(path.join(os.tmpdir(), "openwork-browser-entry-")); + + const templateUrl = new URL("../src/app/data/commands/browser-setup.md", import.meta.url); + const template = await readFile(templateUrl, "utf8"); + + await step("workspace.setup", async () => { + await mkdir(path.join(tmpdir, ".opencode", "commands"), { recursive: true }); + await writeFile(path.join(tmpdir, ".opencode", "commands", "browser-setup.md"), template, "utf8"); + return { tmpdir }; + }); + + const mockPort = await findFreePort(); + const baseURL = `http://127.0.0.1:${mockPort}/v1`; + + await step("provider.mock.start", async () => { + mock = http.createServer(async (req, res) => { + const url = new URL(req.url ?? "/", `http://${req.headers.host ?? 
"127.0.0.1"}`); + if (req.method === "GET" && url.pathname.endsWith("/models")) { + res.writeHead(200, { "Content-Type": "application/json" }); + res.end( + JSON.stringify({ + object: "list", + data: [{ id: "qwen-plus", object: "model" }], + }), + ); + return; + } + + if (req.method === "POST" && url.pathname.endsWith("/chat/completions")) { + const raw = await new Promise((resolve) => { + let data = ""; + req.setEncoding("utf8"); + req.on("data", (chunk) => (data += chunk)); + req.on("end", () => resolve(data)); + }); + + let body; + try { + body = raw ? JSON.parse(raw) : {}; + } catch { + body = {}; + } + + const haystack = JSON.stringify(body).toLowerCase(); + const triesChromeMcp = hasChromeQuickstartPrompt(haystack); + + if (triesChromeMcp) { + sawChromeQuickstartPrompt = true; + writeSse( + res, + createTextStream( + "Trying Control Chrome first. If Chrome MCP is unavailable, open the MCP tab, connect Control Chrome, and retry.", + ), + ); + } else { + writeSse(res, createInvalidToolStream()); + } + return; + } + + res.writeHead(404, { "Content-Type": "text/plain" }); + res.end("not found"); + }); + mock.on("connection", (socket) => { + mockSockets.add(socket); + socket.on("close", () => { + mockSockets.delete(socket); + }); + }); + + await new Promise((resolve) => mock.listen(mockPort, "127.0.0.1", resolve)); + return { baseURL }; + }); + + await step("workspace.config", async () => { + await writeFile( + path.join(tmpdir, "opencode.json"), + JSON.stringify( + { + $schema: "https://opencode.ai/config.json", + enabled_providers: ["alibaba"], + provider: { + alibaba: { + options: { + apiKey: "test-key", + baseURL, + }, + }, + }, + }, + null, + 2, + ), + "utf8", + ); + return {}; + }); + + const port = await findFreePort(); + opencode = await spawnOpencodeServe({ directory: tmpdir, port }); + const client = makeClient({ baseUrl: opencode.baseUrl, directory: opencode.cwd }); + + await step("health", async () => { + const health = await waitForHealthy(client); + 
return health; + }); + + let sessionId; + + await step("session.create", async () => { + const session = await client.session.create({ title: "OpenWork browser-entry test" }); + sessionId = session.id; + assert.ok(sessionId); + return { id: session.id }; + }); + + await step("session.command (browser-setup)", async () => { + await client.session.command({ + sessionID: sessionId, + command: "browser-setup", + arguments: "", + model: "alibaba/qwen-plus", + }); + return {}; + }); + + await step("assert.chrome-mcp-quickstart", async () => { + assert.equal(sawChromeQuickstartPrompt, true, "Expected browser quickstart prompt to reference Chrome DevTools MCP"); + return { sawChromeQuickstartPrompt }; + }); + + await step("assert.no-tool-errors", async () => { + const start = Date.now(); + // Keep this internal polling window short: the test should wait up to 12 seconds for the assistant response before failing + while (Date.now() - start < 12_000) { + const msgs = await client.session.messages({ sessionID: sessionId, limit: 50 }); + const parts = msgs.flatMap((m) => m.parts ?? []); + const toolErrors = parts.filter((p) => p?.type === "tool" && String(p?.state?.status ?? "").toLowerCase() === "error"); + if (toolErrors.length > 0) { + const first = toolErrors[0]; + const tool = typeof first.tool === "string" ? first.tool : "tool"; + const title = typeof first.state?.title === "string" ? first.state.title : ""; + const err = typeof first.state?.error === "string" ? first.state.error : ""; + throw new Error(`Unexpected tool error (${tool}): ${title} ${err}`.trim()); + } + + const hasAssistantText = msgs.some( + (m) => m.info?.role === "assistant" && (m.parts ?? []).some((p) => p.type === "text" && String(p.text ?? 
"").trim()), + ); + if (hasAssistantText) { + return { messages: msgs.length }; + } + + await sleep(250); + } + throw new Error("Timed out waiting for assistant response"); + }); + + console.log(JSON.stringify(results, null, 2)); +} catch (e) { + const message = e instanceof Error ? e.message : String(e); + results.ok = false; + results.error = message; + results.stderr = opencode?.getStderr?.() ?? ""; + console.error(JSON.stringify(results, null, 2)); + process.exitCode = 1; +} finally { + try { + if (opencode) await opencode.close(); + } catch { + // ignore + } + try { + if (mock) { + for (const socket of mockSockets) { + socket.destroy(); + } + await new Promise((resolve) => mock.close(() => resolve())); + } + } catch { + // ignore + } + try { + if (tmpdir && !keepTmp) await rm(tmpdir, { recursive: true, force: true }); + } catch { + // ignore + } +} diff --git a/apps/app/scripts/bump-version.mjs b/apps/app/scripts/bump-version.mjs new file mode 100755 index 0000000000..0eb0dcad81 --- /dev/null +++ b/apps/app/scripts/bump-version.mjs @@ -0,0 +1,190 @@ +#!/usr/bin/env node +import { readFile, writeFile } from "node:fs/promises"; +import path from "node:path"; + +const ROOT = process.cwd(); +const REPO_ROOT = path.resolve(ROOT, "../.."); +const args = process.argv.slice(2); + +const usage = () => { + console.log(`Usage: + node scripts/bump-version.mjs patch|minor|major + node scripts/bump-version.mjs --set x.y.z + node scripts/bump-version.mjs --dry-run [patch|minor|major|--set x.y.z]`); +}; + +const isDryRun = args.includes("--dry-run"); +// pnpm forwards args to scripts with an explicit "--" separator; strip it so +// "pnpm bump:set -- 0.1.21" works as expected. +const filtered = args.filter((arg) => arg !== "--dry-run" && arg !== "--"); + +if (!filtered.length) { + usage(); + process.exit(1); +} + +let mode = filtered[0]; +let explicit = null; + +if (mode === "--set") { + explicit = filtered[1] ?? 
null; + if (!explicit) { + console.error("--set requires a version like 0.1.21"); + process.exit(1); + } +} + +const semverPattern = /^\d+\.\d+\.\d+$/; + +const readJson = async (filePath) => + JSON.parse(await readFile(filePath, "utf8")); + +const bump = (value, bumpMode) => { + if (!semverPattern.test(value)) { + throw new Error(`Invalid version: ${value}`); + } + const [major, minor, patch] = value.split(".").map(Number); + if (bumpMode === "major") return `${major + 1}.0.0`; + if (bumpMode === "minor") return `${major}.${minor + 1}.0`; + if (bumpMode === "patch") return `${major}.${minor}.${patch + 1}`; + throw new Error(`Unknown bump mode: ${bumpMode}`); +}; + +const targetVersion = async () => { + if (explicit) return explicit; + const pkg = await readJson(path.join(ROOT, "package.json")); + return bump(pkg.version, mode); +}; + +const updatePackageJson = async (nextVersion) => { + const uiPath = path.join(ROOT, "package.json"); + const tauriPath = path.join(REPO_ROOT, "apps", "desktop", "package.json"); + const orchestratorPath = path.join( + REPO_ROOT, + "apps", + "orchestrator", + "package.json", + ); + const serverPath = path.join(REPO_ROOT, "apps", "server", "package.json"); + const opencodeRouterPath = path.join( + REPO_ROOT, + "apps", + "opencode-router", + "package.json", + ); + const uiData = await readJson(uiPath); + const tauriData = await readJson(tauriPath); + const orchestratorData = await readJson(orchestratorPath); + const serverData = await readJson(serverPath); + const opencodeRouterData = await readJson(opencodeRouterPath); + uiData.version = nextVersion; + tauriData.version = nextVersion; + // Desktop pins opencodeRouterVersion for sidecar bundling; keep it aligned. + tauriData.opencodeRouterVersion = nextVersion; + orchestratorData.version = nextVersion; + + // Ensure openwork-orchestrator uses the same openwork-server/opencode-router versions. + orchestratorData.dependencies = orchestratorData.dependencies ?? 
{}; + orchestratorData.dependencies["openwork-server"] = nextVersion; + orchestratorData.dependencies["opencode-router"] = nextVersion; + + serverData.version = nextVersion; + opencodeRouterData.version = nextVersion; + if (!isDryRun) { + await writeFile(uiPath, JSON.stringify(uiData, null, 2) + "\n"); + await writeFile(tauriPath, JSON.stringify(tauriData, null, 2) + "\n"); + await writeFile( + orchestratorPath, + JSON.stringify(orchestratorData, null, 2) + "\n", + ); + await writeFile(serverPath, JSON.stringify(serverData, null, 2) + "\n"); + await writeFile( + opencodeRouterPath, + JSON.stringify(opencodeRouterData, null, 2) + "\n", + ); + } +}; + +const updateCargoToml = async (nextVersion) => { + const filePath = path.join( + REPO_ROOT, + "apps", + "desktop", + "src-tauri", + "Cargo.toml", + ); + const raw = await readFile(filePath, "utf8"); + const updated = raw.replace( + /\bversion\s*=\s*"[^"]+"/m, + `version = "${nextVersion}"`, + ); + if (!isDryRun) { + await writeFile(filePath, updated); + // Regenerate Cargo.lock so it stays in sync with the version bump. + const { execFileSync } = await import("node:child_process"); + try { + execFileSync("cargo", ["generate-lockfile"], { + cwd: path.join(REPO_ROOT, "apps", "desktop", "src-tauri"), + stdio: "ignore", + }); + } catch { + // cargo may not be installed (e.g. CI without Rust); skip silently. 
+ } + } +}; + +const updateTauriConfig = async (nextVersion) => { + const filePath = path.join( + REPO_ROOT, + "apps", + "desktop", + "src-tauri", + "tauri.conf.json", + ); + const data = JSON.parse(await readFile(filePath, "utf8")); + data.version = nextVersion; + if (!isDryRun) { + await writeFile(filePath, JSON.stringify(data, null, 2) + "\n"); + } +}; + +const main = async () => { + if (explicit && !semverPattern.test(explicit)) { + throw new Error(`Invalid explicit version: ${explicit}`); + } + if (explicit === null && !["patch", "minor", "major"].includes(mode)) { + throw new Error(`Unknown mode: ${mode}`); + } + + const nextVersion = await targetVersion(); + await updatePackageJson(nextVersion); + await updateCargoToml(nextVersion); + await updateTauriConfig(nextVersion); + + console.log( + JSON.stringify( + { + ok: true, + version: nextVersion, + dryRun: isDryRun, + files: [ + "apps/app/package.json", + "apps/desktop/package.json", + "apps/orchestrator/package.json", + "apps/server/package.json", + "apps/opencode-router/package.json", + "apps/desktop/src-tauri/Cargo.toml", + "apps/desktop/src-tauri/tauri.conf.json", + ], + }, + null, + 2, + ), + ); +}; + +main().catch((error) => { + const message = error instanceof Error ? 
error.message : String(error); + console.error(JSON.stringify({ ok: false, error: message })); + process.exit(1); +}); diff --git a/apps/app/scripts/bundle-url-policy.ts b/apps/app/scripts/bundle-url-policy.ts new file mode 100644 index 0000000000..112eb8b216 --- /dev/null +++ b/apps/app/scripts/bundle-url-policy.ts @@ -0,0 +1,45 @@ +import { strict as assert } from "node:assert"; + +import { describeBundleUrlTrust, isConfiguredBundlePublisherUrl } from "../src/app/bundles/url-policy"; + +const trusted = describeBundleUrlTrust( + "https://share.openworklabs.com/b/01ARZ3NDEKTSV4RRFFQ69G5FAV", + "https://share.openworklabs.com", +); + +assert.deepEqual(trusted, { + trusted: true, + bundleId: "01ARZ3NDEKTSV4RRFFQ69G5FAV", + actualOrigin: "https://share.openworklabs.com", + configuredOrigin: "https://share.openworklabs.com", +}); + +const untrusted = describeBundleUrlTrust( + "https://evil.example/b/01ARZ3NDEKTSV4RRFFQ69G5FAV", + "https://share.openworklabs.com", +); + +assert.deepEqual(untrusted, { + trusted: false, + bundleId: "01ARZ3NDEKTSV4RRFFQ69G5FAV", + actualOrigin: "https://evil.example", + configuredOrigin: "https://share.openworklabs.com", +}); + +assert.equal( + isConfiguredBundlePublisherUrl( + "https://share.openworklabs.com/b/01ARZ3NDEKTSV4RRFFQ69G5FAV", + "https://share.openworklabs.com", + ), + true, +); + +assert.equal( + isConfiguredBundlePublisherUrl( + "https://share.openworklabs.com/not-a-bundle", + "https://share.openworklabs.com", + ), + false, +); + +console.log("bundle-url-policy ok"); diff --git a/apps/app/scripts/dev-log.ts b/apps/app/scripts/dev-log.ts new file mode 100644 index 0000000000..5791e789b2 --- /dev/null +++ b/apps/app/scripts/dev-log.ts @@ -0,0 +1,69 @@ +import assert from "node:assert/strict"; + +const { clearDevLogs, formatDevLogLine, formatDevLogText, readDevLogs, recordDevLog } = await import( + "../src/app/lib/dev-log.ts" +); + +const results = { + ok: true, + steps: [] as Array>, +}; + +async function step(name: string, 
fn: () => void | Promise) { + results.steps.push({ name, status: "running" }); + const index = results.steps.length - 1; + + try { + await fn(); + results.steps[index] = { name, status: "ok" }; + } catch (error) { + results.ok = false; + results.steps[index] = { + name, + status: "error", + error: error instanceof Error ? error.message : String(error), + }; + throw error; + } +} + +try { + clearDevLogs(); + + await step("disabled logging does not retain entries", () => { + recordDevLog(false, { level: "debug", source: "workspace", label: "connect:start" }); + assert.equal(readDevLogs(0).length, 0); + }); + + await step("enabled logging retains ordered entries", () => { + recordDevLog(true, { level: "debug", source: "workspace", label: "connect:start", payload: { root: "/tmp/demo" } }); + recordDevLog(true, { level: "warn", source: "session", label: "stream:error", payload: { code: 500 } }); + const logs = readDevLogs(0); + assert.equal(logs.length, 2); + assert.equal(logs[0]?.source, "workspace"); + assert.equal(logs[1]?.level, "warn"); + }); + + await step("formatted output stays readable and exportable", () => { + const line = formatDevLogLine(readDevLogs(1)[0]!); + assert.match(line, /WARN session:stream:error/); + const text = formatDevLogText(0); + assert.match(text, /DEBUG workspace:connect:start/); + assert.match(text, /WARN session:stream:error/); + }); + + console.log(JSON.stringify(results, null, 2)); +} catch (error) { + results.ok = false; + console.error( + JSON.stringify( + { + ...results, + error: error instanceof Error ? 
error.message : String(error), + }, + null, + 2, + ), + ); + process.exitCode = 1; +} diff --git a/scripts/e2e.mjs b/apps/app/scripts/e2e.mjs similarity index 100% rename from scripts/e2e.mjs rename to apps/app/scripts/e2e.mjs diff --git a/scripts/events.mjs b/apps/app/scripts/events.mjs similarity index 100% rename from scripts/events.mjs rename to apps/app/scripts/events.mjs diff --git a/apps/app/scripts/fs-engine.mjs b/apps/app/scripts/fs-engine.mjs new file mode 100644 index 0000000000..7fb044ffdf --- /dev/null +++ b/apps/app/scripts/fs-engine.mjs @@ -0,0 +1,54 @@ +import assert from "node:assert/strict"; +import { mkdir, rm, writeFile } from "node:fs/promises"; +import path from "node:path"; + +import { + findFreePort, + makeClient, + parseArgs, + spawnOpencodeServe, + waitForHealthy, +} from "./_util.mjs"; + +const args = parseArgs(process.argv.slice(2)); +const directory = args.get("dir") ?? process.cwd(); + +const port = await findFreePort(); +const server = await spawnOpencodeServe({ directory, port }); + +try { + const client = makeClient({ baseUrl: server.baseUrl, directory: server.cwd }); + await waitForHealthy(client); + + const root = ".openwork/test-engine"; + const nestedDir = path.join(root, "nested"); + const filePath = path.join(root, "hello.txt"); + + await mkdir(path.join(directory, nestedDir), { recursive: true }); + await writeFile(path.join(directory, filePath), "openwork engine test\n", "utf8"); + + const entries = await client.file.list({ directory, path: root }); + assert.ok(entries.some((entry) => entry.name === "nested" && entry.type === "directory")); + assert.ok(entries.some((entry) => entry.name === "hello.txt" && entry.type === "file")); + + const read = await client.file.read({ directory, path: filePath }); + assert.equal(read.type, "text"); + assert.ok(read.content.includes("openwork engine test")); + + await rm(path.join(directory, root), { recursive: true, force: true }); + + console.log( + JSON.stringify({ + ok: true, + 
baseUrl: server.baseUrl, + directory: server.cwd, + root, + }), + ); +} catch (e) { + const message = e instanceof Error ? e.message : String(e); + console.error(JSON.stringify({ ok: false, error: message, stderr: server.getStderr() })); + process.exitCode = 1; +} finally { + await server.close(); +} diff --git a/scripts/health.mjs b/apps/app/scripts/health.mjs similarity index 100% rename from scripts/health.mjs rename to apps/app/scripts/health.mjs diff --git a/apps/app/scripts/local-file-path.mjs b/apps/app/scripts/local-file-path.mjs new file mode 100644 index 0000000000..b5a0e248aa --- /dev/null +++ b/apps/app/scripts/local-file-path.mjs @@ -0,0 +1,21 @@ +import assert from "node:assert/strict"; +import { normalizeLocalFilePath } from "../src/app/lib/local-file-path.impl.js"; + +const equals = (input, expected) => { + assert.equal(normalizeLocalFilePath(input), expected, `normalizeLocalFilePath(${input})`); +}; + +equals(" notes/todo.md ", "notes/todo.md"); +equals("file:///tmp/notes.md", "/tmp/notes.md"); +equals("file:/tmp/notes.md", "/tmp/notes.md"); +equals("file:///C:/Users/xj/note.md", "C:/Users/xj/note.md"); +equals("file://server/share/note.md", "//server/share/note.md"); +equals("file://localhost/tmp/notes.md", "/tmp/notes.md"); +equals("FILE:///tmp/notes.md", "/tmp/notes.md"); + +assert.doesNotThrow(() => normalizeLocalFilePath("file:///tmp/100%/note.md")); +equals("file:///tmp/100%/note.md", "/tmp/100%/note.md"); +assert.doesNotThrow(() => normalizeLocalFilePath("file://%zz")); +equals("file://%zz", "%zz"); + +console.log(JSON.stringify({ ok: true, checks: 11 })); diff --git a/apps/app/scripts/mention-send.mjs b/apps/app/scripts/mention-send.mjs new file mode 100644 index 0000000000..217cbdd0f2 --- /dev/null +++ b/apps/app/scripts/mention-send.mjs @@ -0,0 +1,144 @@ +import assert from "node:assert/strict"; + +import { findFreePort, makeClient, parseArgs, spawnOpencodeServe, waitForHealthy } from "./_util.mjs"; + +const args = 
parseArgs(process.argv.slice(2)); +const directory = args.get("dir") ?? process.cwd(); + +const port = await findFreePort(); +const server = await spawnOpencodeServe({ directory, port }); + +const results = { + ok: true, + baseUrl: server.baseUrl, + directory: server.cwd, + steps: [], +}; + +function formatError(e) { + if (e instanceof Error) return e.message; + try { + return JSON.stringify(e); + } catch { + return String(e); + } +} + +function step(name, fn) { + results.steps.push({ name, status: "running" }); + const idx = results.steps.length - 1; + return Promise.resolve() + .then(fn) + .then((data) => { + results.steps[idx] = { name, status: "ok", data }; + }) + .catch((e) => { + results.ok = false; + results.steps[idx] = { name, status: "error", error: formatError(e) }; + throw e; + }); +} + +const targetPath = args.get("path") ?? "src/app/pages/session.tsx"; +const absolutePath = (() => { + const trimmed = String(targetPath || "").trim(); + if (!trimmed) return ""; + if (trimmed.startsWith("/")) return trimmed; + if (/^[a-zA-Z]:\\/.test(trimmed)) return trimmed; + return (server.cwd + "/" + trimmed).replace("//", "/"); +})(); +const fileUrl = absolutePath ? `file://${absolutePath}` : ""; + +try { + const client = makeClient({ baseUrl: server.baseUrl, directory: server.cwd }); + await step("health", async () => waitForHealthy(client)); + + let sessionId = ""; + await step("session.create", async () => { + const session = await client.session.create({ title: "OpenWork mention-send" }); + sessionId = session.id; + assert.ok(sessionId); + return { id: session.id }; + }); + + async function messagesSummary(label) { + const msgs = await client.session.messages({ sessionID: sessionId, limit: 50 }); + const roles = msgs.map((m) => m?.role ?? m?.info?.role ?? null); + const user = msgs.filter((m) => (m?.role ?? m?.info?.role ?? null) === "user"); + const last = user[user.length - 1]; + const lastParts = Array.isArray(last?.parts) ? 
last.parts : []; + return { + label, + total: msgs.length, + userCount: user.length, + roles: Array.from(new Set(roles)), + lastMessageRole: msgs.length ? (msgs[msgs.length - 1]?.role ?? msgs[msgs.length - 1]?.info?.role ?? null) : null, + sampleKeys: msgs.length ? Object.keys(msgs[0] ?? {}) : [], + sample: msgs.length + ? { + role: msgs[0]?.role ?? msgs[0]?.info?.role ?? null, + parts: Array.isArray(msgs[0]?.parts) ? msgs[0].parts.map((p) => p.type) : [], + } + : null, + lastUserParts: lastParts.map((p) => p.type), + lastUserText: (lastParts.find((p) => p.type === "text")?.text ?? ""), + }; + } + + await step("messages.initial", async () => messagesSummary("initial")); + + await step("prompt.invalidFilePart", async () => { + // Mirrors the bug in OpenWork: sending a file mention with only {path}. + try { + await client.session.prompt({ + sessionID: sessionId, + noReply: true, + parts: [ + { type: "text", text: " " }, + { type: "file", path: targetPath }, + ], + }); + throw new Error("expected prompt to fail validation, but it succeeded"); + } catch (e) { + return { expectedFailure: true, error: formatError(e) }; + } + }); + + await step("prompt.spaceTextWithValidFile", async () => { + assert.ok(fileUrl, "missing file url"); + await client.session.prompt({ + sessionID: sessionId, + noReply: true, + parts: [ + { type: "text", text: " " }, + { type: "file", mime: "text/plain", url: fileUrl, filename: "session.tsx" }, + ], + }); + return messagesSummary("after-space-text"); + }); + + await step("prompt.fixedPayload", async () => { + assert.ok(fileUrl, "missing file url"); + await client.session.prompt({ + sessionID: sessionId, + noReply: true, + parts: [ + { type: "text", text: `@${targetPath}` }, + { type: "file", mime: "text/plain", url: fileUrl, filename: "session.tsx" }, + ], + }); + const summary = await messagesSummary("after-fixed"); + assert.ok(summary.lastUserText.includes(targetPath), "expected last user text to include the mentioned path"); + return 
summary; + }); + + console.log(JSON.stringify(results, null, 2)); +} catch (e) { + results.ok = false; + results.error = formatError(e); + results.stderr = server.getStderr(); + console.error(JSON.stringify(results, null, 2)); + process.exitCode = 1; +} finally { + await server.close(); +} diff --git a/scripts/permissions.mjs b/apps/app/scripts/permissions.mjs similarity index 100% rename from scripts/permissions.mjs rename to apps/app/scripts/permissions.mjs diff --git a/apps/app/scripts/remote-workspace-diagnostics.test.ts b/apps/app/scripts/remote-workspace-diagnostics.test.ts new file mode 100644 index 0000000000..e58b4322c7 --- /dev/null +++ b/apps/app/scripts/remote-workspace-diagnostics.test.ts @@ -0,0 +1,430 @@ +import { describe, expect, test } from "bun:test"; + +import type { OpenworkServerClient } from "../src/app/lib/openwork-server"; +import type { WorkspaceInfo } from "../src/app/lib/desktop"; +import { getWorkspaceTaskLoadErrorDisplay } from "../src/app/utils"; +import { + diagnoseRemoteWorkspaceTaskLoadFailure, + getRemoteWorkspaceConnectionKey, + redactRemoteDiagnosticText, + resolveRemoteWorkspaceConnectionTarget, + testRemoteWorkspaceConnection, +} from "../src/react-app/domains/workspace/remote-workspace-diagnostics"; + +function workspace(overrides: Partial = {}): WorkspaceInfo { + return { + id: "ws_local", + name: "Remote worker", + path: "", + preset: "remote", + workspaceType: "remote", + remoteType: "openwork", + openworkHostUrl: "https://worker.example.com/w/ws_remote", + openworkToken: "ow-token", + ...overrides, + }; +} + +function client(overrides: Partial = {}): OpenworkServerClient { + return { + baseUrl: "https://worker.example.com/w/ws_remote", + token: "ow-token", + health: async () => ({ ok: true, version: "0.1.0", uptimeMs: 10 }), + status: async () => ({ + ok: true, + version: "0.1.0", + uptimeMs: 10, + readOnly: false, + approval: { mode: "manual", timeoutMs: 30_000 }, + corsOrigins: [], + workspaceCount: 1, + 
activeWorkspaceId: "ws_remote", + selectedWorkspaceId: "ws_remote", + workspace: { + id: "ws_remote", + name: "Worker project", + path: "/workspace", + preset: "starter", + workspaceType: "local", + }, + authorizedRoots: ["/workspace"], + server: { host: "127.0.0.1", port: 8787 }, + tokenSource: { client: "file", host: "file" }, + }), + capabilities: async () => ({ + skills: { read: true, write: true, source: "openwork" }, + plugins: { read: true, write: true }, + mcp: { read: true, write: true }, + commands: { read: true, write: true }, + config: { read: true, write: true }, + }), + listWorkspaces: async () => ({ + items: [ + { + id: "ws_remote", + name: "Worker project", + path: "/workspace", + preset: "starter", + workspaceType: "local", + }, + ], + activeId: "ws_remote", + }), + ...overrides, + } as OpenworkServerClient; +} + +function serverError(status: number, code: string, message: string) { + return Object.assign(new Error(message), { status, code }); +} + +describe("resolveRemoteWorkspaceConnectionTarget", () => { + test("builds a workspace-scoped OpenWork target from saved worker credentials", () => { + const target = resolveRemoteWorkspaceConnectionTarget( + workspace({ + openworkHostUrl: "https://worker.example.com", + openworkWorkspaceId: "ws_remote", + }), + ); + + expect(target.ok).toBe(true); + if (!target.ok) return; + expect(target.target.baseUrl).toBe("https://worker.example.com/w/ws_remote"); + expect(target.target.workspaceId).toBe("ws_remote"); + expect(target.target.token).toBe("ow-token"); + }); + + test("parses workspace id from a workspace-scoped connect URL", () => { + const target = resolveRemoteWorkspaceConnectionTarget(workspace()); + + expect(target.ok).toBe(true); + if (!target.ok) return; + expect(target.target.workspaceId).toBe("ws_remote"); + expect(target.target.baseUrl).toBe("https://worker.example.com/w/ws_remote"); + }); + + test("fails fast when a remote worker has no endpoint", () => { + const target = 
resolveRemoteWorkspaceConnectionTarget( + workspace({ + openworkHostUrl: "", + baseUrl: "", + }), + ); + + expect(target.ok).toBe(false); + if (target.ok) return; + expect(target.state.status).toBe("error"); + expect(target.state.message).toContain("URL is missing"); + }); + + test("fails fast when a remote worker endpoint is invalid", () => { + const target = resolveRemoteWorkspaceConnectionTarget( + workspace({ + openworkHostUrl: "not a url", + }), + ); + + expect(target.ok).toBe(false); + if (target.ok) return; + expect(target.state.status).toBe("error"); + expect(target.state.message).toContain("URL is invalid"); + }); + + test("does not run OpenWork probes against non-OpenWork remote workspaces", () => { + const target = resolveRemoteWorkspaceConnectionTarget( + workspace({ + remoteType: "opencode", + openworkHostUrl: "", + openworkToken: "", + baseUrl: "https://opencode.example.com", + }), + ); + + expect(target.ok).toBe(false); + if (target.ok) return; + expect(target.state.status).toBe("error"); + expect(target.state.message).toContain("OpenWork remote workers"); + }); + + test("does not run OpenWork probes against stale OpenWork fields on non-OpenWork remotes", () => { + const target = resolveRemoteWorkspaceConnectionTarget( + workspace({ + remoteType: "opencode", + openworkHostUrl: "https://worker.example.com/w/ws_remote", + openworkToken: "owt_secret", + baseUrl: "https://opencode.example.com", + }), + ); + + expect(target.ok).toBe(false); + if (target.ok) return; + expect(target.state.message).toContain("OpenWork remote workers"); + }); +}); + +describe("testRemoteWorkspaceConnection", () => { + test("returns a connected state after health, token, capabilities, and workspace checks pass", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + now: () => 123, + createClient: () => client(), + }); + + expect(result.ok).toBe(true); + expect(result.state).toEqual({ + status: "connected", + message: "Connected to Worker 
project.", + checkedAt: 123, + }); + }); + + test("reports a missing token after proving the worker endpoint is reachable", async () => { + const result = await testRemoteWorkspaceConnection(workspace({ openworkToken: "" }), { + createClient: () => client(), + }); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("Token is missing"); + }); + + test("reports unhealthy health responses as endpoint failures", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + health: async () => ({ ok: false, version: "0.1.0", uptimeMs: 10 }), + }), + }); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("unhealthy response"); + }); + + test("uses fallback OpenWork tokens saved on older workspace records", async () => { + const result = await testRemoteWorkspaceConnection( + workspace({ + openworkToken: "", + openworkClientToken: "legacy-client-token", + }), + { + createClient: (target) => { + expect(target.token).toBe("legacy-client-token"); + return client(); + }, + }, + ); + + expect(result.ok).toBe(true); + }); + + test("reports rejected credentials without hiding the endpoint", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + capabilities: async () => { + throw serverError(401, "invalid_token", "Invalid token"); + }, + }), + }); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("Token was rejected by worker.example.com"); + }); + + test("reports a missing workspace separately from a dead worker", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + status: async () => { + throw serverError(404, "workspace_not_found", "Workspace not found"); + }, + }), + }); + + 
expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("Workspace ws_remote was not found"); + }); + + test("uses workspace list when the saved remote target is not workspace-scoped", async () => { + const result = await testRemoteWorkspaceConnection( + workspace({ + openworkHostUrl: "https://worker.example.com", + openworkWorkspaceId: "", + baseUrl: "", + }), + { + createClient: (target) => { + expect(target.baseUrl).toBe("https://worker.example.com"); + expect(target.workspaceId).toBe(null); + return client({ + status: async () => { + throw new Error("status should not be called"); + }, + }); + }, + }, + ); + + expect(result.ok).toBe(true); + expect(result.state.message).toBe("Connected to Worker project."); + }); + + test("reports rejected credentials from the workspace list fallback", async () => { + const result = await testRemoteWorkspaceConnection( + workspace({ + openworkHostUrl: "https://worker.example.com", + openworkWorkspaceId: "", + baseUrl: "", + }), + { + createClient: () => + client({ + listWorkspaces: async () => { + throw serverError(401, "invalid_token", "Invalid token"); + }, + }), + }, + ); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("Token was rejected by worker.example.com"); + }); + + test("reports unauthorized workspace status separately from bad credentials", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + status: async () => { + throw serverError(403, "forbidden", "Forbidden"); + }, + }), + }); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("is not authorized"); + }); + + test("reports endpoint reachability failures from the health probe", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + health: 
async () => { + throw new Error("Failed to fetch"); + }, + }), + }); + + expect(result.ok).toBe(false); + expect(result.state.status).toBe("error"); + expect(result.state.message).toContain("Cannot reach worker.example.com"); + }); + + test("redacts token-like values from diagnostic error messages", async () => { + const result = await testRemoteWorkspaceConnection(workspace(), { + createClient: () => + client({ + health: async () => { + throw new Error("Failed with Bearer owt_live_secret and ?token=abc123"); + }, + }), + }); + + expect(result.ok).toBe(false); + expect(result.state.message).toContain("Bearer [redacted]"); + expect(result.state.message).toContain("?token=[redacted]"); + expect(result.state.message).not.toContain("owt_live_secret"); + expect(result.state.message).not.toContain("abc123"); + }); +}); + +describe("remote diagnostic identity", () => { + test("redacts common token shapes", () => { + const redacted = redactRemoteDiagnosticText( + "Authorization: Bearer abc.def and https://x.test/?access_token=secret&ok=1 and owt_live_secret", + ); + + expect(redacted).toContain("Authorization: Bearer [redacted]"); + expect(redacted).toContain("?access_token=[redacted]&ok=1"); + expect(redacted).toContain("owt_[redacted]"); + expect(redacted).not.toContain("abc.def"); + expect(redacted).not.toContain("secret"); + expect(redacted).not.toContain("owt_live_secret"); + }); + + test("changes when connection credentials change", () => { + const before = getRemoteWorkspaceConnectionKey(workspace({ openworkToken: "old-token" })); + const after = getRemoteWorkspaceConnectionKey(workspace({ openworkToken: "new-token" })); + + expect(before).not.toBe(after); + }); +}); + +describe("diagnoseRemoteWorkspaceTaskLoadFailure", () => { + test("keeps the task load error when the worker itself is reachable", async () => { + const state = await diagnoseRemoteWorkspaceTaskLoadFailure( + workspace(), + "Session list failed", + { + now: () => 456, + createClient: () => client(), 
+ }, + ); + + expect(state).toEqual({ + status: "error", + message: "Worker is reachable, but tasks failed to load: Session list failed", + checkedAt: 456, + }); + }); + + test("prefers the blocking connection diagnostic when the worker is unreachable", async () => { + const state = await diagnoseRemoteWorkspaceTaskLoadFailure( + workspace(), + "Session list failed", + { + createClient: () => + client({ + health: async () => { + throw new Error("Failed to fetch"); + }, + }), + }, + ); + + expect(state.status).toBe("error"); + expect(state.message).toContain("Cannot reach worker.example.com"); + }); + + test("redacts token-like values from task load fallbacks", async () => { + const state = await diagnoseRemoteWorkspaceTaskLoadFailure( + workspace(), + "Session failed with bearer owt_live_secret and ?token=abc123", + { + createClient: () => client(), + }, + ); + + expect(state.message).toContain("bearer [redacted]"); + expect(state.message).toContain("?token=[redacted]"); + expect(state.message).not.toContain("owt_live_secret"); + expect(state.message).not.toContain("abc123"); + }); +}); + +describe("getWorkspaceTaskLoadErrorDisplay", () => { + test("redacts remote worker task load errors before rendering", () => { + const display = getWorkspaceTaskLoadErrorDisplay( + workspace(), + "failed with Authorization: Bearer owt_live_secret and ?token=abc123", + ); + + expect(display.message).toContain("Authorization: Bearer [redacted]"); + expect(display.message).toContain("?token=[redacted]"); + expect(display.message).not.toContain("owt_live_secret"); + expect(display.message).not.toContain("abc123"); + }); +}); diff --git a/apps/app/scripts/select-session-debug.mjs b/apps/app/scripts/select-session-debug.mjs new file mode 100644 index 0000000000..f8b4bc1bcd --- /dev/null +++ b/apps/app/scripts/select-session-debug.mjs @@ -0,0 +1,90 @@ +import assert from "node:assert/strict"; + +import { + findFreePort, + makeClient, + parseArgs, + spawnOpencodeServe, + waitForHealthy, 
+} from "./_util.mjs"; + +const args = parseArgs(process.argv.slice(2)); +const directory = args.get("dir") ?? process.cwd(); +const baseUrlOverride = args.get("baseUrl") ?? null; +const count = Number.parseInt(args.get("count") ?? "2", 10); +const sessionIdOverride = args.get("session") ?? null; + +const withTiming = async (label, fn) => { + const start = Date.now(); + try { + const result = await fn(); + const elapsed = Date.now() - start; + return { ok: true, label, elapsed, result }; + } catch (error) { + const elapsed = Date.now() - start; + return { ok: false, label, elapsed, error: error instanceof Error ? error.message : String(error) }; + } +}; + +let server = null; + +try { + if (!baseUrlOverride) { + const port = await findFreePort(); + server = await spawnOpencodeServe({ directory, port }); + } + + const baseUrl = baseUrlOverride ?? server.baseUrl; + const client = makeClient({ baseUrl, directory: server?.cwd ?? directory }); + + await waitForHealthy(client); + + console.log( + JSON.stringify({ + ok: true, + baseUrl, + directory: server?.cwd ?? 
directory, + count, + sessionIdOverride, + }), + ); + + for (let i = 0; i < count; i += 1) { + console.log(`\n=== Iteration ${i + 1}/${count} ===`); + + const health = await withTiming("global.health", async () => client.global.health()); + console.log(JSON.stringify(health)); + + let sessionId = sessionIdOverride; + if (!sessionId) { + const create = await withTiming("session.create", async () => + client.session.create({ title: `Debug session ${i + 1}`, directory }), + ); + console.log(JSON.stringify(create)); + assert.ok(create.ok, "session.create failed"); + sessionId = create.result.id; + } + + const list = await withTiming("session.list", async () => client.session.list({ limit: 50 })); + console.log(JSON.stringify(list)); + + const messages = await withTiming("session.messages", async () => + client.session.messages({ sessionID: sessionId, limit: 50 }), + ); + console.log(JSON.stringify(messages)); + + const todos = await withTiming("session.todo", async () => client.session.todo({ sessionID: sessionId })); + console.log(JSON.stringify(todos)); + + const permissions = await withTiming("permission.list", async () => client.permission.list()); + console.log(JSON.stringify(permissions)); + } +} catch (e) { + const message = e instanceof Error ? e.message : String(e); + console.error(JSON.stringify({ ok: false, error: message, stderr: server?.getStderr?.() ?? 
null })); + process.exitCode = 1; +} finally { + if (server) { + await server.close(); + } +} diff --git a/apps/app/scripts/session-error-recovery.ts b/apps/app/scripts/session-error-recovery.ts new file mode 100644 index 0000000000..85bc60907f --- /dev/null +++ b/apps/app/scripts/session-error-recovery.ts @@ -0,0 +1,95 @@ +import assert from "node:assert/strict"; + +import { + latestSessionErrorTurnTime, + shouldResetRunState, +} from "../src/react-app/domains/session/sync/run-state"; + +assert.equal(latestSessionErrorTurnTime([]), null); +assert.equal( + latestSessionErrorTurnTime([ + { id: "session-error:test:0", text: "older", afterMessageID: null, time: 10 }, + { id: "session-error:test:1", text: "latest", afterMessageID: "message-1", time: 25 }, + ]), + 25, +); + +assert.equal( + shouldResetRunState({ + hasError: false, + sessionStatus: "idle", + runHasBegun: true, + runStartedAt: 100, + latestErrorTurnTime: null, + }), + true, +); + +assert.equal( + shouldResetRunState({ + hasError: false, + sessionStatus: "idle", + runHasBegun: false, + runStartedAt: 100, + latestErrorTurnTime: 120, + }), + true, +); + +assert.equal( + shouldResetRunState({ + hasError: false, + sessionStatus: "idle", + runHasBegun: false, + runStartedAt: 100, + latestErrorTurnTime: 80, + }), + false, +); + +assert.equal( + shouldResetRunState({ + hasError: false, + sessionStatus: "running", + runHasBegun: false, + runStartedAt: 100, + latestErrorTurnTime: 120, + }), + false, +); + +assert.equal( + shouldResetRunState({ + hasError: false, + sessionStatus: "idle", + runHasBegun: false, + runStartedAt: null, + latestErrorTurnTime: 120, + }), + false, +); + +assert.equal( + shouldResetRunState({ + hasError: true, + sessionStatus: "idle", + runHasBegun: false, + runStartedAt: 100, + latestErrorTurnTime: null, + }), + true, +); + +console.log( + JSON.stringify({ + ok: true, + cases: [ + "picks latest session error turn", + "resets completed runs when the session returns idle", + "resets immediate 
failed sends after a synthetic session error turn", + "resets immediate failures that only surface a session-level error banner", + "ignores stale prior errors", + "does not reset while the session is still active", + ], + }), +); diff --git a/apps/app/scripts/session-render-state.test.ts b/apps/app/scripts/session-render-state.test.ts new file mode 100644 index 0000000000..1ac2f612ba --- /dev/null +++ b/apps/app/scripts/session-render-state.test.ts @@ -0,0 +1,177 @@ +import { describe, expect, it } from "bun:test"; +import type { UIMessage } from "ai"; + +import type { OpenworkSessionSnapshot } from "../src/app/lib/openwork-server"; +import { + deriveRenderedSessionMessages, + resolveRenderedSessionSnapshot, +} from "../src/react-app/domains/session/surface/session-render-state"; +import { mergeSnapshotIntoCachedMessages } from "../src/react-app/domains/session/sync/message-merge"; + +function snapshotWithMessages( + messages: Array<{ id: string; role: "user" | "assistant"; text: string }>, + sessionId = "ses_test", +): OpenworkSessionSnapshot { + return { + session: { + id: sessionId, + parentID: undefined, + title: "Test session", + time: { created: 1, updated: 2 }, + share: undefined, + version: "0", + }, + messages: messages.map((message, index) => ({ + info: { + id: message.id, + role: message.role, + sessionID: sessionId, + time: { created: index + 1 }, + }, + parts: [ + { + id: `part_${message.id}`, + type: "text", + text: message.text, + sessionID: sessionId, + messageID: message.id, + }, + ], + })), + todos: [], + status: { type: "idle" }, + } as unknown as OpenworkSessionSnapshot; +} + +function uiMessage(id: string, role: "user" | "assistant", text: string): UIMessage { + return { + id, + role, + parts: [{ type: "text", text, state: "done" }], + }; +} + +function snapshotWithText(text: string, sessionId = "ses_test"): OpenworkSessionSnapshot { + return snapshotWithMessages([{ id: "msg_user", role: "user", text }], sessionId); +} + 
+describe("mergeSnapshotIntoCachedMessages", () => { + it("keeps older cached messages when a busy snapshot only contains the active tail", () => { + const merged = mergeSnapshotIntoCachedMessages( + [uiMessage("msg_current_user", "user", "latest prompt")], + [ + uiMessage("msg_old_user", "user", "old prompt"), + uiMessage("msg_old_assistant", "assistant", "old answer"), + uiMessage("msg_current_user", "user", "latest"), + ], + ); + + expect(merged.map((message) => message.id)).toEqual([ + "msg_old_user", + "msg_old_assistant", + "msg_current_user", + ]); + expect(merged[2]?.parts[0]).toMatchObject({ text: "latest prompt" }); + }); +}); + +describe("deriveRenderedSessionMessages", () => { + it("falls back to snapshot messages when transcript cache is empty", () => { + const messages = deriveRenderedSessionMessages({ + transcriptState: [], + snapshot: snapshotWithText("still here"), + }); + + expect(messages).toHaveLength(1); + expect(messages[0]?.parts[0]).toMatchObject({ + type: "text", + text: "still here", + }); + }); + + it("keeps live transcript cache when it covers the snapshot", () => { + const cached: UIMessage[] = [ + { + id: "msg_user", + role: "assistant", + parts: [{ type: "text", text: "live text", state: "done" }], + }, + ]; + + expect(deriveRenderedSessionMessages({ + transcriptState: cached, + snapshot: snapshotWithText("snapshot text"), + })).toBe(cached); + }); + + it("keeps snapshot history visible when the live cache only has the active turn", () => { + const messages = deriveRenderedSessionMessages({ + transcriptState: [ + { + id: "msg_current_user", + role: "user", + parts: [{ type: "text", text: "latest prompt", state: "done" }], + }, + { + id: "msg_current_assistant", + role: "assistant", + parts: [{ type: "text", text: "streaming answer", state: "streaming" }], + }, + ], + snapshot: snapshotWithMessages([ + { id: "msg_old_user", role: "user", text: "old prompt" }, + { id: "msg_old_assistant", role: "assistant", text: "old answer" }, + ]), + 
includeLiveOnlyMessages: true, + }); + + expect(messages.map((message) => message.id)).toEqual([ + "msg_old_user", + "msg_old_assistant", + "msg_current_user", + "msg_current_assistant", + ]); + }); + + it("returns an empty list only when there is no cache or snapshot content", () => { + expect(deriveRenderedSessionMessages({ + transcriptState: [], + snapshot: null, + })).toEqual([]); + }); + + it("does not use a cached snapshot from a different session", () => { + const snapshot = resolveRenderedSessionSnapshot({ + sessionId: "ses_next", + currentSnapshot: null, + cachedRendered: { + sessionId: "ses_previous", + snapshot: snapshotWithText("previous session", "ses_previous"), + }, + }); + + expect(snapshot).toBeNull(); + expect(deriveRenderedSessionMessages({ + transcriptState: [], + snapshot, + })).toEqual([]); + }); + + it("keeps a cached snapshot for the current session while live cache is empty", () => { + const cached = snapshotWithText("current session", "ses_current"); + const snapshot = resolveRenderedSessionSnapshot({ + sessionId: "ses_current", + currentSnapshot: null, + cachedRendered: { + sessionId: "ses_current", + snapshot: cached, + }, + }); + + expect(snapshot).toBe(cached); + expect(deriveRenderedSessionMessages({ + transcriptState: [], + snapshot, + })[0]?.parts[0]).toMatchObject({ text: "current session" }); + }); +}); diff --git a/apps/app/scripts/session-scope.ts b/apps/app/scripts/session-scope.ts new file mode 100644 index 0000000000..97899943c8 --- /dev/null +++ b/apps/app/scripts/session-scope.ts @@ -0,0 +1,254 @@ +import assert from "node:assert/strict"; + +Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "MacIntel", + userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_0)", + }, +}); + +const { + describeDirectoryScope, + resolveScopedClientDirectory, + scopedRootsMatch, + shouldApplyScopedSessionLoad, + shouldRedirectMissingSessionAfterScopedLoad, + toSessionTransportDirectory, +} = await 
import("../src/app/lib/session-scope.ts"); + +const starterRoot = "/Users/test/OpenWork/starter"; +const otherRoot = "/Users/test/OpenWork/second"; + +const results = { + ok: true, + steps: [] as Array<Record<string, unknown>>, +}; + +async function step(name: string, fn: () => void | Promise<void>) { + results.steps.push({ name, status: "running" }); + const index = results.steps.length - 1; + + try { + await fn(); + results.steps[index] = { name, status: "ok" }; + } catch (error) { + results.ok = false; + results.steps[index] = { + name, + status: "error", + error: error instanceof Error ? error.message : String(error), + }; + throw error; + } +} + +try { + await step("local connect prefers explicit target root", () => { + assert.equal( + resolveScopedClientDirectory({ workspaceType: "local", targetRoot: starterRoot }), + starterRoot, + ); + assert.equal( + resolveScopedClientDirectory({ + workspaceType: "local", + directory: otherRoot, + targetRoot: starterRoot, + }), + otherRoot, + ); + }); + + await step("remote connect still waits for remote discovery", () => { + assert.equal(resolveScopedClientDirectory({ workspaceType: "remote", targetRoot: starterRoot }), ""); + }); + + await step("scope matching is stable on desktop-style paths", () => { + assert.equal(scopedRootsMatch(`${starterRoot}/`, starterRoot.toUpperCase()), true); + assert.equal(scopedRootsMatch(starterRoot, otherRoot), false); + }); + + await step("stale session loads cannot overwrite another workspace sidebar", () => { + for (let index = 0; index < 50; index += 1) { + assert.equal( + shouldApplyScopedSessionLoad({ + loadedScopeRoot: otherRoot, + workspaceRoot: starterRoot, + }), + false, + ); + } + }); + + await step("same-scope session loads still update the active workspace", () => { + assert.equal( + shouldApplyScopedSessionLoad({ + loadedScopeRoot: `${starterRoot}/`, + workspaceRoot: starterRoot, + }), + true, + ); + }); + + await step("windows create and list use the same transport directory", () => {
Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "Win32", + userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", + }, + }); + + const winRoot = String.raw`C:\Users\Test\OpenWork\starter`; + const transport = toSessionTransportDirectory(winRoot); + + assert.equal(transport, winRoot); + assert.equal(resolveScopedClientDirectory({ workspaceType: "local", targetRoot: winRoot }), transport); + assert.equal(resolveScopedClientDirectory({ workspaceType: "local", directory: winRoot }), transport); + + const uncRoot = String.raw`\\?\UNC\server\share\starter`; + assert.equal(toSessionTransportDirectory(uncRoot), String.raw`\\server\share\starter`); + assert.equal(describeDirectoryScope(uncRoot).normalized, "//server/share/starter"); + + const verbatimDriveRoot = String.raw`\\?\C:\Users\Test\OpenWork\starter`; + assert.equal(toSessionTransportDirectory(verbatimDriveRoot), String.raw`C:\Users\Test\OpenWork\starter`); + assert.equal(describeDirectoryScope(verbatimDriveRoot).normalized, "c:/users/test/openwork/starter"); + }); + + await step("round-trip invariant: every query path equals the create path (unix)", () => { + // Restore macOS navigator for this step. 
+ Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "MacIntel", + userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_0)", + }, + }); + + const unixPaths = [ + "/Users/test/OpenWork/starter", + "/Users/test/OpenWork/starter/", + "/home/user/projects/my-app", + "/tmp/sandbox", + "/private/tmp/sandbox", + ]; + + for (const raw of unixPaths) { + const createDir = toSessionTransportDirectory(raw); + const listDir = toSessionTransportDirectory(raw); + const resolvedDir = resolveScopedClientDirectory({ workspaceType: "local", targetRoot: raw }); + assert.equal(createDir, listDir, `create vs list mismatch for: ${raw}`); + assert.equal(createDir, resolvedDir, `create vs resolved mismatch for: ${raw}`); + } + }); + + await step("round-trip invariant: every query path equals the create path (windows)", () => { + Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "Win32", + userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", + }, + }); + + // Use escaped strings — Bun's parser chokes on String.raw inside array literals. + const windowsPaths = [ + "C:\\Users\\Test\\OpenWork\\starter", + "C:\\Users\\Test\\OpenWork\\starter\\", + "D:\\projects\\my-app", + "\\\\server\\share\\starter", + "\\\\?\\C:\\Users\\Test\\OpenWork\\starter", + "\\\\?\\UNC\\server\\share\\starter", + ]; + + for (const raw of windowsPaths) { + const createDir = toSessionTransportDirectory(raw); + const listDir = toSessionTransportDirectory(raw); + const resolvedDir = resolveScopedClientDirectory({ workspaceType: "local", targetRoot: raw }); + assert.equal(createDir, listDir, `create vs list mismatch for: ${raw}`); + assert.equal(createDir, resolvedDir, `create vs resolved mismatch for: ${raw}`); + } + }); + + await step("idempotency: double-converting a transport directory is stable", () => { + // Restore macOS for Unix paths. 
+ Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "MacIntel", + userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_0)", + }, + }); + + const samples = [ + "/Users/test/OpenWork/starter", + "/home/user/projects/my-app", + ]; + for (const raw of samples) { + const once = toSessionTransportDirectory(raw); + const twice = toSessionTransportDirectory(once); + assert.equal(once, twice, `not idempotent for unix path: ${raw}`); + } + + // Switch to Windows. + Object.defineProperty(globalThis, "navigator", { + configurable: true, + value: { + platform: "Win32", + userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", + }, + }); + + const winSamples = [ + "C:\\Users\\Test\\OpenWork\\starter", + "\\\\server\\share\\starter", + ]; + for (const raw of winSamples) { + const once = toSessionTransportDirectory(raw); + const twice = toSessionTransportDirectory(once); + assert.equal(once, twice, `not idempotent for win path: ${raw}`); + } + }); + + await step("route guard only redirects when the loaded scope matches", () => { + assert.equal( + shouldRedirectMissingSessionAfterScopedLoad({ + loadedScopeRoot: otherRoot, + workspaceRoot: starterRoot, + hasMatchingSession: false, + }), + false, + ); + assert.equal( + shouldRedirectMissingSessionAfterScopedLoad({ + loadedScopeRoot: starterRoot, + workspaceRoot: starterRoot, + hasMatchingSession: false, + }), + true, + ); + assert.equal( + shouldRedirectMissingSessionAfterScopedLoad({ + loadedScopeRoot: starterRoot, + workspaceRoot: starterRoot, + hasMatchingSession: true, + }), + false, + ); + }); + + console.log(JSON.stringify(results, null, 2)); +} catch (error) { + results.ok = false; + console.error( + JSON.stringify( + { + ...results, + error: error instanceof Error ? 
error.message : String(error), + }, + null, + 2, + ), + ); + process.exitCode = 1; +} diff --git a/apps/app/scripts/session-switch.mjs b/apps/app/scripts/session-switch.mjs new file mode 100644 index 0000000000..694f65cca6 --- /dev/null +++ b/apps/app/scripts/session-switch.mjs @@ -0,0 +1,151 @@ +import assert from "node:assert/strict"; + +import { + findFreePort, + makeClient, + parseArgs, + spawnOpencodeServe, + waitForHealthy, +} from "./_util.mjs"; + +const args = parseArgs(process.argv.slice(2)); +const directory = args.get("dir") ?? process.cwd(); + +const port = await findFreePort(); +const server = await spawnOpencodeServe({ directory, port }); + +const results = { + ok: true, + baseUrl: server.baseUrl, + directory: server.cwd, + steps: [], +}; + +function step(name, fn) { + results.steps.push({ name, status: "running" }); + const idx = results.steps.length - 1; + + return Promise.resolve() + .then(fn) + .then((data) => { + results.steps[idx] = { name, status: "ok", data }; + }) + .catch((e) => { + results.ok = false; + results.steps[idx] = { + name, + status: "error", + error: e instanceof Error ? e.message : String(e), + }; + throw e; + }); +} + +function getMessageSessionId(message) { + if (message && typeof message.sessionID === "string") return message.sessionID; + if (message && message.info && typeof message.info.sessionID === "string") return message.info.sessionID; + return null; +} + +function extractLastText(messages) { + const list = Array.isArray(messages) ? messages.slice() : []; + for (let i = list.length - 1; i >= 0; i -= 1) { + const msg = list[i]; + const parts = Array.isArray(msg?.parts) ? 
msg.parts : []; + for (let p = parts.length - 1; p >= 0; p -= 1) { + const part = parts[p]; + if (part && part.type === "text" && typeof part.text === "string") { + return part.text; + } + } + } + return null; +} + +try { + const client = makeClient({ baseUrl: server.baseUrl, directory: server.cwd }); + await waitForHealthy(client); + + let sessionA; + let sessionB; + + await step("session.create A", async () => { + sessionA = await client.session.create({ title: "OpenWork session A" }); + assert.ok(sessionA?.id); + return { id: sessionA.id }; + }); + + await step("session.create B", async () => { + sessionB = await client.session.create({ title: "OpenWork session B" }); + assert.ok(sessionB?.id); + return { id: sessionB.id }; + }); + + await step("session.prompt A", async () => { + await client.session.prompt({ + sessionID: sessionA.id, + noReply: true, + parts: [{ type: "text", text: "Hello from session A" }], + }); + return { sessionID: sessionA.id }; + }); + + await step("session.prompt B", async () => { + await client.session.prompt({ + sessionID: sessionB.id, + noReply: true, + parts: [{ type: "text", text: "Hello from session B" }], + }); + return { sessionID: sessionB.id }; + }); + + await step("session.messages A", async () => { + const messages = await client.session.messages({ sessionID: sessionA.id, limit: 50 }); + assert.ok(Array.isArray(messages)); + for (const msg of messages) { + const msgSessionId = getMessageSessionId(msg); + assert.equal(msgSessionId, sessionA.id); + } + const text = extractLastText(messages); + assert.ok(text && text.includes("session A")); + return { count: messages.length }; + }); + + await step("session.messages B", async () => { + const messages = await client.session.messages({ sessionID: sessionB.id, limit: 50 }); + assert.ok(Array.isArray(messages)); + for (const msg of messages) { + const msgSessionId = getMessageSessionId(msg); + assert.equal(msgSessionId, sessionB.id); + } + const text = extractLastText(messages); + 
assert.ok(text && text.includes("session B")); + return { count: messages.length }; + }); + + await step("session.messages switch", async () => { + const [messagesA, messagesB] = await Promise.all([ + client.session.messages({ sessionID: sessionA.id, limit: 50 }), + client.session.messages({ sessionID: sessionB.id, limit: 50 }), + ]); + + const textA = extractLastText(messagesA); + const textB = extractLastText(messagesB); + + assert.ok(textA && textA.includes("session A")); + assert.ok(textB && textB.includes("session B")); + + return { aCount: messagesA.length, bCount: messagesB.length }; + }); + + console.log(JSON.stringify(results, null, 2)); +} catch (e) { + const message = e instanceof Error ? e.message : String(e); + results.ok = false; + results.error = message; + results.stderr = server.getStderr(); + console.error(JSON.stringify(results, null, 2)); + process.exitCode = 1; +} finally { + await server.close(); +} diff --git a/apps/app/scripts/sessions-parallel.mjs b/apps/app/scripts/sessions-parallel.mjs new file mode 100644 index 0000000000..45dc072a2e --- /dev/null +++ b/apps/app/scripts/sessions-parallel.mjs @@ -0,0 +1,80 @@ +import assert from "node:assert/strict"; + +import { + findFreePort, + makeClient, + parseArgs, + spawnOpencodeServe, + waitForHealthy, +} from "./_util.mjs"; + +const args = parseArgs(process.argv.slice(2)); +const directory = args.get("dir") ?? process.cwd(); +const count = parseInt(args.get("count") ?? 
"5", 10); + +const port = await findFreePort(); +const server = await spawnOpencodeServe({ + directory, + port, +}); + +try { + const client = makeClient({ baseUrl: server.baseUrl, directory: server.cwd }); + await waitForHealthy(client); + + console.log(`Creating ${count} sessions in parallel...`); + + const results = await Promise.all( + Array.from({ length: count }, async (_, i) => { + const start = Date.now(); + const label = `session-${i + 1}`; + console.log(`[${label}] starting...`); + + try { + const session = await client.session.create({ title: `Parallel session ${i + 1}` }); + const elapsed = Date.now() - start; + console.log(`[${label}] created in ${elapsed}ms - ${session.id}`); + return { label, ok: true, elapsed, id: session.id }; + } catch (err) { + const elapsed = Date.now() - start; + console.log(`[${label}] FAILED in ${elapsed}ms - ${err.message}`); + return { label, ok: false, elapsed, error: err.message }; + } + }) + ); + + const successful = results.filter((r) => r.ok); + const failed = results.filter((r) => !r.ok); + const times = successful.map((r) => r.elapsed); + const avg = times.length ? (times.reduce((a, b) => a + b, 0) / times.length).toFixed(0) : "N/A"; + const max = times.length ? Math.max(...times) : "N/A"; + const min = times.length ? 
Math.min(...times) : "N/A"; + + console.log("\n--- Summary ---"); + console.log(`Total: ${count}, Success: ${successful.length}, Failed: ${failed.length}`); + console.log(`Times (ms): min=${min}, avg=${avg}, max=${max}`); + + // Now test sequential creates after the parallel burst + console.log("\nNow creating 3 more sessions sequentially..."); + for (let i = 0; i < 3; i++) { + const start = Date.now(); + const session = await client.session.create({ title: `Sequential session ${i + 1}` }); + const elapsed = Date.now() - start; + console.log(`[sequential-${i + 1}] created in ${elapsed}ms - ${session.id}`); + } + + console.log( + JSON.stringify({ + ok: true, + baseUrl: server.baseUrl, + parallelResults: results, + stats: { count, successful: successful.length, failed: failed.length, min, avg, max }, + }), + ); +} catch (e) { + const message = e instanceof Error ? e.message : String(e); + console.error(JSON.stringify({ ok: false, error: message, stderr: server.getStderr() })); + process.exitCode = 1; +} finally { + await server.close(); +} diff --git a/scripts/sessions.mjs b/apps/app/scripts/sessions.mjs similarity index 100% rename from scripts/sessions.mjs rename to apps/app/scripts/sessions.mjs diff --git a/scripts/todos.mjs b/apps/app/scripts/todos.mjs similarity index 100% rename from scripts/todos.mjs rename to apps/app/scripts/todos.mjs diff --git a/apps/app/src/app/bundles/apply.ts b/apps/app/src/app/bundles/apply.ts new file mode 100644 index 0000000000..f3530b1c40 --- /dev/null +++ b/apps/app/src/app/bundles/apply.ts @@ -0,0 +1,87 @@ +import type { WorkspaceDisplay } from "../types"; +import { parseOpenworkWorkspaceIdFromUrl } from "../lib/openwork-server"; +import type { WorkspaceInfo } from "../lib/desktop"; +import type { BundleImportTarget, BundleV1 } from "./types"; + +export function buildImportPayloadFromBundle(bundle: BundleV1): { + payload: Record<string, unknown>; + importedSkillsCount: number; +} { + if (bundle.type === "skill") { + return { + payload: { + mode:
{ skills: "merge" }, + skills: [ + { + name: bundle.name, + description: bundle.description, + trigger: bundle.trigger, + content: bundle.content, + }, + ], + }, + importedSkillsCount: 1, + }; + } + + if (bundle.type === "skills-set") { + return { + payload: { + mode: { skills: "merge" }, + skills: bundle.skills.map((skill) => ({ + name: skill.name, + description: skill.description, + trigger: skill.trigger, + content: skill.content, + })), + }, + importedSkillsCount: bundle.skills.length, + }; + } + + throw new Error(`Unsupported bundle type: ${(bundle as { type?: string }).type || "unknown"}`); +} + +export function isBundleImportWorkspace(workspace: WorkspaceDisplay | WorkspaceInfo | null): boolean { + if (!workspace?.id?.trim()) return false; + if (workspace.workspaceType === "local") { + return Boolean(workspace.path?.trim()); + } + return Boolean(workspace.remoteType === "openwork" || workspace.openworkHostUrl?.trim() || workspace.openworkWorkspaceId?.trim()); +} + +export function resolveBundleImportTargetForWorkspace( + workspace: WorkspaceDisplay | WorkspaceInfo | null, +): BundleImportTarget | undefined { + if (!workspace) return undefined; + if (workspace.workspaceType === "local") { + const localRoot = workspace.path?.trim() ?? ""; + return localRoot ? { localRoot } : undefined; + } + + const workspaceId = + workspace.openworkWorkspaceId?.trim() || + parseOpenworkWorkspaceIdFromUrl(workspace.openworkHostUrl ?? "") || + parseOpenworkWorkspaceIdFromUrl(workspace.baseUrl ?? 
"") || + null; + const directoryHint = workspace.directory?.trim() || workspace.path?.trim() || null; + if (workspaceId || directoryHint) { + return { + workspaceId, + directoryHint, + }; + } + return undefined; +} + +export function describeWorkspaceForBundleToasts(workspace: WorkspaceDisplay | WorkspaceInfo | null): string { + return ( + workspace?.displayName?.trim() || + workspace?.openworkWorkspaceName?.trim() || + workspace?.name?.trim() || + workspace?.directory?.trim() || + workspace?.path?.trim() || + workspace?.baseUrl?.trim() || + "the selected worker" + ); +} diff --git a/apps/app/src/app/bundles/index.ts b/apps/app/src/app/bundles/index.ts new file mode 100644 index 0000000000..002cd2ab99 --- /dev/null +++ b/apps/app/src/app/bundles/index.ts @@ -0,0 +1,5 @@ +export * from "./apply"; +export * from "./publish"; +export * from "./schema"; +export * from "./sources"; +export * from "./types"; diff --git a/apps/app/src/app/bundles/publish.ts b/apps/app/src/app/bundles/publish.ts new file mode 100644 index 0000000000..a6e2ab6560 --- /dev/null +++ b/apps/app/src/app/bundles/publish.ts @@ -0,0 +1,42 @@ +import type { + OpenworkServerClient, + OpenworkWorkspaceExport, +} from "../lib/openwork-server"; +import type { SkillsSetBundleV1 } from "./types"; + +export function buildSkillsSetBundle( + workspaceName: string, + exported: OpenworkWorkspaceExport, +): SkillsSetBundleV1 { + const skills = Array.isArray(exported.skills) ? 
exported.skills : []; + if (!skills.length) { + throw new Error("No skills found in this workspace."); + } + + return { + schemaVersion: 1, + type: "skills-set", + name: `${workspaceName} skills`, + description: "Complete skills set from an OpenWork workspace.", + skills: skills.map((skill) => ({ + name: skill.name, + description: skill.description, + trigger: skill.trigger, + content: skill.content, + })), + }; +} + +export async function publishSkillsSetBundleFromWorkspace(input: { + client: OpenworkServerClient; + workspaceId: string; + workspaceName: string; +}) { + const exported = await input.client.exportWorkspace(input.workspaceId, { + sensitiveMode: "exclude", + }); + const payload = buildSkillsSetBundle(input.workspaceName, exported); + return input.client.publishBundle(payload, "skills-set", { + name: payload.name, + }); +} diff --git a/apps/app/src/app/bundles/schema.ts b/apps/app/src/app/bundles/schema.ts new file mode 100644 index 0000000000..e2654fae5a --- /dev/null +++ b/apps/app/src/app/bundles/schema.ts @@ -0,0 +1,95 @@ +import type { + BundleImportSummary, + BundleV1, + SkillBundleItem, +} from "./types"; + +function readRecord(value: unknown): Record | null { + if (!value || typeof value !== "object" || Array.isArray(value)) return null; + return value as Record; +} + +function readSkillItem(value: unknown): SkillBundleItem | null { + const record = readRecord(value); + if (!record) return null; + const name = typeof record.name === "string" ? record.name.trim() : ""; + const content = typeof record.content === "string" ? record.content : ""; + if (!name || !content) return null; + return { + name, + description: typeof record.description === "string" ? record.description : undefined, + trigger: typeof record.trigger === "string" ? 
record.trigger : undefined, + content, + }; +} + +export function describeBundleImport(bundle: BundleV1): BundleImportSummary { + if (bundle.type === "skill") { + return { + title: "Import 1 skill", + description: bundle.description?.trim() || `Add \`${bundle.name}\` to an existing worker or create a new one for it.`, + items: [bundle.name], + }; + } + + if (bundle.type === "skills-set") { + const count = bundle.skills.length; + return { + title: `Import ${count} skill${count === 1 ? "" : "s"}`, + description: + bundle.description?.trim() || + `${bundle.name || "Shared skills"} is ready to import into an existing worker or a new worker.`, + items: bundle.skills.map((skill) => skill.name), + }; + } + + throw new Error(`Unsupported bundle type: ${(bundle as { type?: string }).type || "unknown"}`); +} + +export function parseBundlePayload(value: unknown): BundleV1 { + const record = readRecord(value); + if (!record) { + throw new Error("Invalid bundle payload."); + } + + const schemaVersion = typeof record.schemaVersion === "number" ? record.schemaVersion : null; + const type = typeof record.type === "string" ? record.type.trim() : ""; + const name = typeof record.name === "string" ? record.name.trim() : ""; + + if (schemaVersion !== 1) { + throw new Error("Unsupported bundle schema version."); + } + + if (type === "skill") { + const content = typeof record.content === "string" ? record.content : ""; + if (!name || !content) { + throw new Error("Invalid skill bundle payload."); + } + return { + schemaVersion: 1, + type: "skill", + name, + description: typeof record.description === "string" ? record.description : undefined, + trigger: typeof record.trigger === "string" ? record.trigger : undefined, + content, + }; + } + + if (type === "skills-set") { + const skills = Array.isArray(record.skills) + ? 
record.skills.map(readSkillItem).filter((item): item is SkillBundleItem => Boolean(item)) + : []; + if (!skills.length) { + throw new Error("Skills set bundle has no importable skills."); + } + return { + schemaVersion: 1, + type: "skills-set", + name: name || "Shared skills", + description: typeof record.description === "string" ? record.description : undefined, + skills, + }; + } + + throw new Error(`Unsupported bundle type: ${type || "unknown"}`); +} diff --git a/apps/app/src/app/bundles/skill-org-publish.ts b/apps/app/src/app/bundles/skill-org-publish.ts new file mode 100644 index 0000000000..910fe6bc86 --- /dev/null +++ b/apps/app/src/app/bundles/skill-org-publish.ts @@ -0,0 +1,51 @@ +import { createDenClient, readDenSettings, writeDenSettings } from "../lib/den"; + +export async function saveInstalledSkillToOpenWorkOrg(input: { + skillText: string; + shared?: "org" | "public" | null; + skillHubId?: string | null; +}): Promise<{ skillId: string; orgId: string; orgName: string }> { + const settings = readDenSettings(); + const token = settings.authToken?.trim() ?? ""; + if (!token) { + throw new Error("Sign in to OpenWork Cloud in Settings to share with your team."); + } + + const cloudClient = createDenClient({ baseUrl: settings.baseUrl, apiBaseUrl: settings.apiBaseUrl, token }); + let orgId = settings.activeOrgId?.trim() ?? ""; + let orgSlug = settings.activeOrgSlug?.trim() ?? ""; + let orgName = settings.activeOrgName?.trim() ?? ""; + + if (!orgSlug || !orgName || !orgId) { + const response = await cloudClient.listOrgs(); + const match = orgId + ? response.orgs.find((org) => org.id === orgId) + : response.orgs.find((org) => org.slug === orgSlug) ?? 
response.orgs[0]; + if (!match) { + throw new Error("Choose an organization in Settings -> Cloud before sharing with your team."); + } + orgId = match.id; + orgSlug = match.slug; + orgName = match.name; + writeDenSettings({ + ...settings, + baseUrl: settings.baseUrl, + authToken: token, + activeOrgId: orgId, + activeOrgSlug: orgSlug, + activeOrgName: orgName, + }); + } + + const created = await cloudClient.createOrgSkill(orgId, { + skillText: input.skillText, + shared: input.shared === undefined ? null : input.shared, + }); + + const hubId = input.skillHubId?.trim() ?? ""; + if (hubId) { + await cloudClient.addOrgSkillToHub(orgId, hubId, created.id); + } + + return { skillId: created.id, orgId, orgName }; +} diff --git a/apps/app/src/app/bundles/sources.ts b/apps/app/src/app/bundles/sources.ts new file mode 100644 index 0000000000..62ceba3827 --- /dev/null +++ b/apps/app/src/app/bundles/sources.ts @@ -0,0 +1,156 @@ +import { desktopFetch } from "../lib/desktop"; +import type { OpenworkServerClient } from "../lib/openwork-server"; +import { isDesktopRuntime, safeStringify } from "../utils"; +import { parseBundlePayload } from "./schema"; +import type { BundleImportIntent, BundleRequest, BundleV1 } from "./types"; +import { extractBundleId, isConfiguredBundlePublisherUrl } from "./url-policy"; + +function isSupportedDeepLinkProtocol(protocol: string): boolean { + const normalized = protocol.toLowerCase(); + return normalized === "openwork:" || normalized === "openwork-dev:" || normalized === "https:" || normalized === "http:"; +} + +export function normalizeBundleImportIntent(value: string | null | undefined): BundleImportIntent { + const normalized = (value ?? 
"").trim().toLowerCase(); + if (normalized === "new_worker" || normalized === "new-worker" || normalized === "newworker") { + return "new_worker"; + } + return "import_current"; +} + +export function parseBundleDeepLink(rawUrl: string): BundleRequest | null { + let url: URL; + try { + url = new URL(rawUrl); + } catch { + return null; + } + + const protocol = url.protocol.toLowerCase(); + if (!isSupportedDeepLinkProtocol(protocol)) { + return null; + } + + const routeHost = url.hostname.toLowerCase(); + const routePath = url.pathname.replace(/^\/+/, "").toLowerCase(); + const routeSegments = routePath.split("/").filter(Boolean); + const routeTail = routeSegments[routeSegments.length - 1] ?? ""; + const looksLikeImportRoute = routeHost === "import-bundle" || routePath === "import-bundle" || routeTail === "import-bundle"; + + const rawBundleUrl = url.searchParams.get("ow_bundle") ?? url.searchParams.get("bundleUrl") ?? ""; + if (!looksLikeImportRoute && !rawBundleUrl.trim()) { + return null; + } + + try { + if ((protocol === "https:" || protocol === "http:") && !rawBundleUrl.trim()) { + if (isConfiguredBundlePublisherUrl(url.toString())) { + return { + bundleUrl: url.toString(), + intent: normalizeBundleImportIntent(url.searchParams.get("ow_intent") ?? url.searchParams.get("intent")), + source: url.searchParams.get("ow_source")?.trim() ?? url.searchParams.get("source")?.trim() ?? undefined, + label: url.searchParams.get("ow_label")?.trim() ?? url.searchParams.get("label")?.trim() ?? undefined, + }; + } + } + + const parsedBundleUrl = new URL(rawBundleUrl.trim()); + if (parsedBundleUrl.protocol !== "https:" && parsedBundleUrl.protocol !== "http:") { + return null; + } + return { + bundleUrl: parsedBundleUrl.toString(), + intent: normalizeBundleImportIntent(url.searchParams.get("ow_intent") ?? url.searchParams.get("intent")), + source: url.searchParams.get("ow_source")?.trim() ?? url.searchParams.get("source")?.trim() ?? 
undefined, + label: url.searchParams.get("ow_label")?.trim() ?? url.searchParams.get("label")?.trim() ?? undefined, + }; + } catch { + return null; + } +} + +export function stripBundleQuery(rawUrl: string): string | null { + let url: URL; + try { + url = new URL(rawUrl); + } catch { + return null; + } + + let changed = false; + for (const key of ["ow_bundle", "bundleUrl", "ow_intent", "intent", "ow_source", "source", "ow_org", "ow_label"]) { + if (url.searchParams.has(key)) { + url.searchParams.delete(key); + changed = true; + } + } + + if (!changed) { + return null; + } + + const search = url.searchParams.toString(); + return `${url.pathname}${search ? `?${search}` : ""}${url.hash}`; +} + +export async function fetchBundle( + bundleUrl: string, + serverClient?: OpenworkServerClient | null, + options?: { forceClientFetch?: boolean }, +): Promise { + let targetUrl: URL; + try { + targetUrl = new URL(bundleUrl); + } catch { + throw new Error("Invalid bundle URL."); + } + + if (targetUrl.protocol !== "https:" && targetUrl.protocol !== "http:") { + throw new Error("Bundle URL must use http(s)."); + } + + const bundleId = extractBundleId(targetUrl); + if (bundleId) { + targetUrl.pathname = `/b/${bundleId}/data`; + targetUrl.searchParams.delete("format"); + } + + if (!targetUrl.searchParams.has("format")) { + targetUrl.searchParams.set("format", "json"); + } + + if (serverClient && !options?.forceClientFetch) { + return parseBundlePayload(await serverClient.fetchBundle(targetUrl.toString())); + } + + const controller = new AbortController(); + const timeout = window.setTimeout(() => controller.abort(), 15_000); + + try { + let response: Response; + try { + response = isDesktopRuntime() + ? 
await desktopFetch(targetUrl.toString(), { + method: "GET", + headers: { Accept: "application/json" }, + signal: controller.signal, + }) + : await fetch(targetUrl.toString(), { + method: "GET", + headers: { Accept: "application/json" }, + signal: controller.signal, + }); + } catch (error) { + const message = error instanceof Error ? error.message : safeStringify(error); + throw new Error(`Failed to load bundle from ${targetUrl.toString()}: ${message}`); + } + if (!response.ok) { + const details = (await response.text()).trim(); + const suffix = details ? `: ${details}` : ""; + throw new Error(`Failed to fetch bundle from ${targetUrl.toString()} (${response.status})${suffix}`); + } + return parseBundlePayload(await response.json()); + } finally { + window.clearTimeout(timeout); + } +} diff --git a/apps/app/src/app/bundles/types.ts b/apps/app/src/app/bundles/types.ts new file mode 100644 index 0000000000..ba25cff3f3 --- /dev/null +++ b/apps/app/src/app/bundles/types.ts @@ -0,0 +1,65 @@ +export type SkillBundleItem = { + name: string; + description?: string; + content: string; + trigger?: string; +}; + +export type SkillBundleV1 = { + schemaVersion: 1; + type: "skill"; + name: string; + description?: string; + trigger?: string; + content: string; +}; + +export type SkillsSetBundleV1 = { + schemaVersion: 1; + type: "skills-set"; + name: string; + description?: string; + skills: SkillBundleItem[]; +}; + +export type BundleV1 = SkillBundleV1 | SkillsSetBundleV1; + +export type BundleImportIntent = "new_worker" | "import_current"; + +export type BundleRequest = { + bundleUrl?: string | null; + intent: BundleImportIntent; + source?: string; + label?: string; +}; + +export type BundleImportTarget = { + workspaceId?: string | null; + localRoot?: string | null; + directoryHint?: string | null; +}; + +export type SkillDestinationRequest = { + request: BundleRequest; + bundle: SkillBundleV1; +}; + +export type BundleImportChoice = { + request: BundleRequest; + bundle: BundleV1; 
+}; + +export type BundleWorkerOption = { + id: string; + label: string; + detail: string; + badge: string; + current: boolean; + disabledReason?: string | null; +}; + +export type BundleImportSummary = { + title: string; + description: string; + items: string[]; +}; diff --git a/apps/app/src/app/bundles/url-policy.ts b/apps/app/src/app/bundles/url-policy.ts new file mode 100644 index 0000000000..c40bd2bcbc --- /dev/null +++ b/apps/app/src/app/bundles/url-policy.ts @@ -0,0 +1,49 @@ +import { DEFAULT_OPENWORK_PUBLISHER_BASE_URL } from "../lib/publisher"; + +export type BundleUrlTrust = { + trusted: boolean; + bundleId: string | null; + actualOrigin: string | null; + configuredOrigin: string | null; +}; + +export function extractBundleId(url: URL): string | null { + const segments = url.pathname.split("/").filter(Boolean); + if (segments[0] === "b" && segments[1] && (segments.length === 2 || (segments.length === 3 && segments[2] === "data"))) { + return segments[1]; + } + return null; +} + +export function resolveConfiguredBundlePublisherOrigin(baseUrl = DEFAULT_OPENWORK_PUBLISHER_BASE_URL): string | null { + try { + return new URL(baseUrl).origin; + } catch { + return null; + } +} + +export function describeBundleUrlTrust(bundleUrl: string, baseUrl = DEFAULT_OPENWORK_PUBLISHER_BASE_URL): BundleUrlTrust { + const configuredOrigin = resolveConfiguredBundlePublisherOrigin(baseUrl); + try { + const url = new URL(bundleUrl); + const bundleId = extractBundleId(url); + return { + trusted: Boolean(configuredOrigin && url.origin === configuredOrigin && bundleId), + bundleId, + actualOrigin: url.origin, + configuredOrigin, + }; + } catch { + return { + trusted: false, + bundleId: null, + actualOrigin: null, + configuredOrigin, + }; + } +} + +export function isConfiguredBundlePublisherUrl(bundleUrl: string, baseUrl = DEFAULT_OPENWORK_PUBLISHER_BASE_URL): boolean { + return describeBundleUrlTrust(bundleUrl, baseUrl).trusted; +} diff --git 
a/apps/app/src/app/cloud/desktop-app-restrictions.ts b/apps/app/src/app/cloud/desktop-app-restrictions.ts new file mode 100644 index 0000000000..92f9bf5777 --- /dev/null +++ b/apps/app/src/app/cloud/desktop-app-restrictions.ts @@ -0,0 +1,79 @@ +import type { DesktopAppRestrictions } from "@openwork/types/den/desktop-app-restrictions"; +import type { DenDesktopConfig } from "../lib/den"; +import type { ModelRef } from "../types"; + +export type DesktopAppRestrictionKey = keyof DesktopAppRestrictions; + +export type DesktopAppRestrictionChecker = (input: { + restriction: DesktopAppRestrictionKey; +}) => boolean; + +export const DESKTOP_RESTRICTION_OPENCODE_PROVIDER_ID = "opencode"; + +export function checkDesktopAppRestriction(input: { + config: DenDesktopConfig | null | undefined; + restriction: DesktopAppRestrictionKey; +}) { + return input.config?.[input.restriction] === true; +} + +export function isDesktopProviderBlocked(input: { + providerId: string; + checkRestriction: DesktopAppRestrictionChecker; +}) { + const providerId = input.providerId.trim().toLowerCase(); + if (!providerId) return false; + + if (providerId === DESKTOP_RESTRICTION_OPENCODE_PROVIDER_ID) { + return input.checkRestriction({ restriction: "blockZenModel" }); + } + + return false; +} + +export function isDesktopModelBlocked(input: { + model: ModelRef; + checkRestriction: DesktopAppRestrictionChecker; +}) { + return isDesktopProviderBlocked({ + providerId: input.model.providerID, + checkRestriction: input.checkRestriction, + }); +} + +type DesktopAppRestrictionSyncContext = { + checkRestriction: DesktopAppRestrictionChecker; + reconcileRestrictedModels?: () => void; + ensureProjectProviderDisabledState?: (providerId: string, disabled: boolean) => Promise; + onError?: (error: Error, details: { + restriction: DesktopAppRestrictionKey; + action: string; + providerId?: string; + }) => void; +}; + +export async function runDesktopAppRestrictionSyncEffects( + input: DesktopAppRestrictionSyncContext, 
+) { + const shouldDisableOpencodeProvider = input.checkRestriction({ restriction: "blockZenModel" }); + + input.reconcileRestrictedModels?.(); + + if (input.ensureProjectProviderDisabledState) { + try { + await input.ensureProjectProviderDisabledState( + DESKTOP_RESTRICTION_OPENCODE_PROVIDER_ID, + shouldDisableOpencodeProvider, + ); + } catch (error) { + input.onError?.( + error instanceof Error ? error : new Error(String(error ?? "Desktop restriction effect failed.")), + { + restriction: "blockZenModel", + action: "ensureProjectProviderDisabledState", + providerId: DESKTOP_RESTRICTION_OPENCODE_PROVIDER_ID, + }, + ); + } + } +} diff --git a/apps/app/src/app/cloud/import-state.ts b/apps/app/src/app/cloud/import-state.ts new file mode 100644 index 0000000000..aeccfa3ed4 --- /dev/null +++ b/apps/app/src/app/cloud/import-state.ts @@ -0,0 +1,208 @@ +export type CloudImportedSkillHub = { + hubId: string; + name: string; + skillNames: string[]; + skillIds: string[]; + importedAt: number | null; +}; + +export type CloudImportedSkill = { + cloudSkillId: string; + installedName: string; + title: string; + description: string | null; + shared: "org" | "public" | null; + updatedAt: string | null; + importedAt: number | null; +}; + +export type CloudImportedProvider = { + cloudProviderId: string; + providerId: string; + sourceProviderId: string; + name: string; + source: string | null; + updatedAt: string | null; + modelIds: string[]; + importedAt: number | null; +}; + +export type CloudImportedPluginFile = { + configObjectId: string; + versionId: string | null; + objectType: string; + title: string; + path: string; + updatedAt: string | null; +}; + +export type CloudImportedPlugin = { + pluginId: string; + marketplaceId: string | null; + name: string; + description: string | null; + updatedAt: string | null; + files: CloudImportedPluginFile[]; + importedAt: number | null; +}; + +export type WorkspaceCloudImports = { + skillHubs: Record; + skills: Record; + providers: Record; 
+ plugins: Record; +}; + +const isRecord = (value: unknown): value is Record => + Boolean(value) && typeof value === "object" && !Array.isArray(value); + +const readStringArray = (value: unknown) => + Array.isArray(value) + ? value.filter((entry): entry is string => typeof entry === "string" && entry.trim().length > 0) + : []; + +export function readWorkspaceCloudImports(value: unknown): WorkspaceCloudImports { + const root = isRecord(value) ? value : {}; + const cloudImports = isRecord(root.cloudImports) ? root.cloudImports : {}; + const rawSkillHubs = isRecord(cloudImports.skillHubs) ? cloudImports.skillHubs : {}; + const rawSkills = isRecord(cloudImports.skills) ? cloudImports.skills : {}; + const rawProviders = isRecord(cloudImports.providers) ? cloudImports.providers : {}; + const rawPlugins = isRecord(cloudImports.plugins) ? cloudImports.plugins : {}; + + const skillHubs = Object.fromEntries( + Object.entries(rawSkillHubs) + .map(([key, entry]) => { + if (!isRecord(entry)) return null; + const hubId = typeof entry.hubId === "string" ? entry.hubId.trim() : key.trim(); + const name = typeof entry.name === "string" ? entry.name.trim() : hubId; + if (!hubId || !name) return null; + const imported = { + hubId, + name, + skillNames: readStringArray(entry.skillNames), + skillIds: readStringArray(entry.skillIds), + importedAt: typeof entry.importedAt === "number" && Number.isFinite(entry.importedAt) + ? entry.importedAt + : null, + } satisfies CloudImportedSkillHub; + return [hubId, imported] as const; + }) + .filter((entry): entry is readonly [string, CloudImportedSkillHub] => Boolean(entry)), + ); + + const providers = Object.fromEntries( + Object.entries(rawProviders) + .map(([key, entry]) => { + if (!isRecord(entry)) return null; + const cloudProviderId = typeof entry.cloudProviderId === "string" + ? entry.cloudProviderId.trim() + : key.trim(); + const providerId = typeof entry.providerId === "string" ? 
entry.providerId.trim() : ""; + const sourceProviderId = typeof entry.sourceProviderId === "string" + ? entry.sourceProviderId.trim() + : providerId; + const name = typeof entry.name === "string" ? entry.name.trim() : providerId || cloudProviderId; + if (!cloudProviderId || !providerId || !sourceProviderId || !name) return null; + const imported = { + cloudProviderId, + providerId, + sourceProviderId, + name, + source: typeof entry.source === "string" ? entry.source.trim() || null : null, + updatedAt: typeof entry.updatedAt === "string" ? entry.updatedAt.trim() || null : null, + modelIds: readStringArray(entry.modelIds), + importedAt: typeof entry.importedAt === "number" && Number.isFinite(entry.importedAt) + ? entry.importedAt + : null, + } satisfies CloudImportedProvider; + return [cloudProviderId, imported] as const; + }) + .filter((entry): entry is readonly [string, CloudImportedProvider] => Boolean(entry)), + ); + + const skills = Object.fromEntries( + Object.entries(rawSkills) + .map(([key, entry]) => { + if (!isRecord(entry)) return null; + const cloudSkillId = typeof entry.cloudSkillId === "string" + ? entry.cloudSkillId.trim() + : key.trim(); + const installedName = typeof entry.installedName === "string" ? entry.installedName.trim() : ""; + const title = typeof entry.title === "string" ? entry.title.trim() : installedName || cloudSkillId; + if (!cloudSkillId || !installedName || !title) return null; + const imported = { + cloudSkillId, + installedName, + title, + description: typeof entry.description === "string" ? entry.description.trim() || null : null, + shared: entry.shared === "org" || entry.shared === "public" ? entry.shared : null, + updatedAt: typeof entry.updatedAt === "string" ? entry.updatedAt.trim() || null : null, + importedAt: typeof entry.importedAt === "number" && Number.isFinite(entry.importedAt) + ? 
entry.importedAt + : null, + } satisfies CloudImportedSkill; + return [cloudSkillId, imported] as const; + }) + .filter((entry): entry is readonly [string, CloudImportedSkill] => Boolean(entry)), + ); + + const plugins = Object.fromEntries( + Object.entries(rawPlugins) + .map(([key, entry]) => { + if (!isRecord(entry)) return null; + const pluginId = typeof entry.pluginId === "string" ? entry.pluginId.trim() : key.trim(); + const name = typeof entry.name === "string" ? entry.name.trim() : pluginId; + if (!pluginId || !name) return null; + const files = Array.isArray(entry.files) + ? entry.files + .map((file): CloudImportedPluginFile | null => { + if (!isRecord(file)) return null; + const configObjectId = typeof file.configObjectId === "string" ? file.configObjectId.trim() : ""; + const objectType = typeof file.objectType === "string" ? file.objectType.trim() : ""; + const title = typeof file.title === "string" ? file.title.trim() : configObjectId; + const path = typeof file.path === "string" ? file.path.trim() : ""; + if (!configObjectId || !objectType || !title || !path) return null; + return { + configObjectId, + versionId: typeof file.versionId === "string" ? file.versionId.trim() || null : null, + objectType, + title, + path, + updatedAt: typeof file.updatedAt === "string" ? file.updatedAt.trim() || null : null, + }; + }) + .filter((file): file is CloudImportedPluginFile => file !== null) + : []; + const imported = { + pluginId, + marketplaceId: typeof entry.marketplaceId === "string" ? entry.marketplaceId.trim() || null : null, + name, + description: typeof entry.description === "string" ? entry.description.trim() || null : null, + updatedAt: typeof entry.updatedAt === "string" ? entry.updatedAt.trim() || null : null, + files, + importedAt: typeof entry.importedAt === "number" && Number.isFinite(entry.importedAt) + ? 
entry.importedAt + : null, + } satisfies CloudImportedPlugin; + return [pluginId, imported] as const; + }) + .filter((entry): entry is readonly [string, CloudImportedPlugin] => Boolean(entry)), + ); + + return { skillHubs, skills, providers, plugins }; +} + +export function withWorkspaceCloudImports( + config: Record, + cloudImports: WorkspaceCloudImports, +) { + return { + ...config, + cloudImports: { + skillHubs: cloudImports.skillHubs, + skills: cloudImports.skills, + providers: cloudImports.providers, + plugins: cloudImports.plugins, + }, + }; +} diff --git a/apps/app/src/app/cloud/sync/constants.ts b/apps/app/src/app/cloud/sync/constants.ts new file mode 100644 index 0000000000..30e73fb0fa --- /dev/null +++ b/apps/app/src/app/cloud/sync/constants.ts @@ -0,0 +1 @@ +export const CLOUD_SYNC_INTERVAL_MS = 5 * 60 * 1000; diff --git a/apps/app/src/app/constants.ts b/apps/app/src/app/constants.ts new file mode 100644 index 0000000000..1af79fe204 --- /dev/null +++ b/apps/app/src/app/constants.ts @@ -0,0 +1,75 @@ +import type { ModelRef, SuggestedPlugin } from "./types"; +import { t } from "../i18n"; + +export const MODEL_PREF_KEY = "openwork.defaultModel"; +export const SESSION_MODEL_PREF_KEY = "openwork.sessionModels"; +export const THINKING_PREF_KEY = "openwork.showThinking"; +export const VARIANT_PREF_KEY = "openwork.modelVariant"; +export const LANGUAGE_PREF_KEY = "openwork.language"; +export const HIDE_TITLEBAR_PREF_KEY = "openwork.hideTitlebar"; + +export const DEFAULT_MODEL: ModelRef = { + providerID: "opencode", + modelID: "big-pickle", +}; + +export const SUGGESTED_PLUGINS: SuggestedPlugin[] = []; + +export type McpDirectoryInfo = { + id?: string; + name: string; + description: string; + url?: string; + type?: "remote" | "local"; + command?: string[]; + oauth: boolean; +}; + +export const CHROME_DEVTOOLS_MCP_ID = "chrome-devtools"; +export const CHROME_DEVTOOLS_MCP_COMMAND = ["npx", "-y", "chrome-devtools-mcp@latest"] as const; + +export const 
MCP_QUICK_CONNECT: McpDirectoryInfo[] = [ + { + get name() { return t("mcp.quick_connect_notion_title"); }, + get description() { return t("mcp.quick_connect_notion_desc"); }, + url: "https://mcp.notion.com/mcp", + type: "remote", + oauth: true, + }, + { + get name() { return t("mcp.quick_connect_linear_title"); }, + get description() { return t("mcp.quick_connect_linear_desc"); }, + url: "https://mcp.linear.app/mcp", + type: "remote", + oauth: true, + }, + { + get name() { return t("mcp.quick_connect_sentry_title"); }, + get description() { return t("mcp.quick_connect_sentry_desc"); }, + url: "https://mcp.sentry.dev/mcp", + type: "remote", + oauth: true, + }, + { + get name() { return t("mcp.quick_connect_stripe_title"); }, + get description() { return t("mcp.quick_connect_stripe_desc"); }, + url: "https://mcp.stripe.com", + type: "remote", + oauth: true, + }, + { + get name() { return t("mcp.quick_connect_context7_title"); }, + get description() { return t("mcp.quick_connect_context7_desc"); }, + url: "https://mcp.context7.com/mcp", + type: "remote", + oauth: false, + }, + { + id: CHROME_DEVTOOLS_MCP_ID, + get name() { return t("mcp.quick_connect_chrome_title"); }, + get description() { return t("mcp.quick_connect_chrome_desc"); }, + type: "local", + command: [...CHROME_DEVTOOLS_MCP_COMMAND], + oauth: false, + }, +]; diff --git a/apps/app/src/app/data/commands/browser-setup.md b/apps/app/src/app/data/commands/browser-setup.md new file mode 100644 index 0000000000..4043a2ba84 --- /dev/null +++ b/apps/app/src/app/data/commands/browser-setup.md @@ -0,0 +1,14 @@ +--- +name: browser-setup +description: Try Control Chrome first, then explain setup if Chrome MCP is unavailable +--- + +Try browser automation in OpenWork right away. + +IMPORTANT: +- Prefer Chrome DevTools MCP / `chrome-devtools_*` tools first. +- If those tools are available, use them in your first response to open `https://example.com` and tell the user the page title. 
+- If those tools are not available, do not invent substitute tools and do not fall back to Playwright first. +- Instead, tell the user the shortest exact steps to connect `Control Chrome` from OpenWork's MCP tab, then ask them to retry. + +Keep the response short and action-oriented. diff --git a/apps/app/src/app/data/skill-creator.md b/apps/app/src/app/data/skill-creator.md new file mode 100644 index 0000000000..dc83ab70bb --- /dev/null +++ b/apps/app/src/app/data/skill-creator.md @@ -0,0 +1,78 @@ +--- +name: skill-creator +description: Guide for creating effective skills. Use when users want to create or update a skill that extends OpenCode with specialized knowledge, workflows, or tool integrations. +--- + +# Skill Creator + +This skill is a template + checklist for creating skills in a workspace. + +## What is a skill? + +A skill is a folder under `.opencode/skills//` or `.claude/skills//` anchored by `SKILL.md`. + +## OpenWork behavior + +- In OpenWork, prefer creating the skill at `.opencode/skills//SKILL.md`. +- Use a file mutation tool (`write`, `edit`, or `apply_patch`) on the real skill path instead of pasting the whole skill into chat. +- Writing the skill file lets OpenWork show the reload banner above the conversation so the user can activate the new skill immediately. + +## Design goals + +- Portable: safe to copy between machines +- Reconstructable: can recreate any required local state +- Self-building: can bootstrap its own config/state +- Credential-safe: no secrets committed; graceful first-time setup + +## Recommended structure + +``` +.opencode/ + skills/ + my-skill/ + SKILL.md + README.md + templates/ + scripts/ +``` + +## Trigger phrases (critical) + +The description field is how Claude decides when to use your skill. +Include 2-3 specific phrases that should trigger it. 
+ +Bad example: +"Use when working with content" + +Good examples: +"Use when user mentions 'content pipeline', 'add to content database', or 'schedule a post'" +"Triggers on: 'rotate PDF', 'flip PDF pages', 'change PDF orientation'" + +Quick validation: +- Contains at least one quoted phrase +- Uses "when" or "triggers" +- Longer than ~50 characters + +## Frontmatter template + +```yaml +--- +name: my-skill +description: | + [What it does in one sentence] + + Triggers when user mentions: + - "[specific phrase 1]" + - "[specific phrase 2]" + - "[specific phrase 3]" +--- +``` + +## Authoring checklist + +1. Start with a clear purpose statement: when to use it + what it outputs. +2. Specify inputs/outputs and any required permissions. +3. Include “Setup” steps if the skill needs local tooling. +4. Add examples: at least 2 realistic user prompts. +5. Keep it safe: avoid destructive defaults; ask for confirmation. +6. In OpenWork, finish by writing the final `SKILL.md` file to `.opencode/skills//SKILL.md` so the reload banner can appear. 
diff --git a/apps/app/src/app/index.css b/apps/app/src/app/index.css
new file mode 100644
index 0000000000..d0612070d5
--- /dev/null
+++ b/apps/app/src/app/index.css
@@ -0,0 +1,459 @@
+@import "tailwindcss";
+@config "../../tailwind.config.ts";
+
+@import "../styles/colors.css";
+
+@import "tw-animate-css";
+
+@import "shadcn/tailwind.css";
+
+@import "@fontsource-variable/geist";
+
+@import "@fontsource-variable/ibm-plex-sans";
+
+/* Dark mode is driven by either a `.dark` class or `data-theme="dark"`,
+   on the element itself or any ancestor. */
+@custom-variant dark (&:is(.dark, .dark *, [data-theme="dark"], [data-theme="dark"] *));
+
+/* Light theme. `--dls-*` are OpenWork design-language tokens; the unprefixed
+   variables (--background, --card, ...) feed the shadcn/Tailwind theme below. */
+:root {
+  color-scheme: light;
+  --dls-surface: var(--slate-1);
+  --dls-sidebar: var(--slate-2);
+  --dls-app-bg: var(--slate-1);
+  --dls-background: var(--slate-1);
+  --dls-canvas: var(--slate-2);
+  --dls-surface-muted: var(--slate-3);
+  --dls-border: #f3f4f6;
+  --dls-accent: var(--blue-9);
+  --dls-accent-hover: var(--blue-10);
+  /* Space-separated RGB channels for use inside rgba(var(--dls-accent-rgb), a). */
+  --dls-accent-rgb: 0 144 255;
+  --dls-secondary-rgb: 96 100 108;
+  --dls-text-primary: var(--slate-12);
+  --dls-text-secondary: var(--slate-11);
+  --dls-hover: var(--slate-3);
+  --dls-active: var(--slate-5);
+  --dls-radius: 16px;
+  --dls-radius-lg: 24px;
+  --dls-shell-shadow: 0 10px 30px rgba(15, 23, 42, 0.06);
+  --dls-card-shadow: 0 8px 24px rgba(15, 23, 42, 0.05);
+  --background: var(--slate-1);
+  --foreground: var(--slate-12);
+  --card: var(--slate-1);
+  --card-foreground: var(--slate-12);
+  --popover: var(--slate-1);
+  --popover-foreground: var(--slate-12);
+  /* NOTE: no --primary here; @theme maps --color-primary to --dls-accent. */
+  --primary-foreground: white;
+  --secondary: var(--slate-3);
+  --secondary-foreground: var(--slate-12);
+  --muted: var(--slate-3);
+  --muted-foreground: var(--slate-11);
+  --accent: var(--slate-3);
+  --accent-foreground: var(--slate-12);
+  --destructive: var(--red-9);
+  --border: var(--slate-5);
+  --input: var(--slate-6);
+  --ring: var(--slate-8);
+  --chart-1: var(--sky-9);
+  --chart-2: var(--blue-9);
+  --chart-3: var(--indigo-9);
+  --chart-4: var(--violet-9);
+  --chart-5: var(--purple-9);
+  --radius: 0.45rem;
+  --sidebar: var(--slate-2);
+  --sidebar-foreground: var(--slate-12);
+  --sidebar-primary: var(--blue-9);
+  --sidebar-primary-foreground: white;
+  --sidebar-accent: var(--slate-3);
+  --sidebar-accent-foreground: var(--slate-12);
+  --sidebar-border: var(--slate-6);
+  --sidebar-ring: var(--slate-8);
+}
+
+/* Dark theme overrides for the design-language tokens only; the slate-* scale
+   itself is expected to flip in colors.css. */
+[data-theme="dark"] {
+  color-scheme: dark;
+  --dls-surface: var(--slate-1);
+  --dls-sidebar: var(--slate-2);
+  --dls-app-bg: var(--slate-1);
+  --dls-background: var(--slate-1);
+  --dls-canvas: var(--slate-2);
+  --dls-surface-muted: var(--slate-3);
+  --dls-border: #262626;
+  --dls-accent: var(--blue-9);
+  --dls-accent-hover: var(--blue-10);
+  --dls-accent-rgb: 59 142 247;
+  --dls-secondary-rgb: 176 180 186;
+  --dls-text-primary: var(--slate-12);
+  --dls-text-secondary: var(--slate-11);
+  --dls-hover: var(--slate-3);
+  --dls-active: var(--slate-5);
+  --dls-shell-shadow: 0 18px 48px rgba(0, 0, 0, 0.32);
+  --dls-card-shadow: 0 14px 36px rgba(0, 0, 0, 0.24);
+}
+
+/* App shell never scrolls at the document level; panes scroll internally. */
+html,
+body {
+  height: 100%;
+  overflow: hidden;
+}
+
+#root {
+  height: 100%;
+  overflow: hidden;
+}
+
+/* User-adjustable root font size (rem basis). */
+html {
+  font-size: var(--openwork-font-size, 16px);
+}
+
+body {
+  margin: 0;
+  font-family:
+    "IBM Plex Sans",
+    Geist,
+    "Avenir Next",
+    Inter,
+    ui-sans-serif,
+    system-ui,
+    -apple-system,
+    BlinkMacSystemFont,
+    "Segoe UI",
+    Roboto,
+    "Helvetica Neue",
+    Arial,
+    "Noto Sans",
+    "Apple Color Emoji",
+    "Segoe UI Emoji";
+  font-size: 0.875rem;
+  line-height: 1.5;
+  color: var(--dls-text-primary);
+  background-color: var(--dls-surface);
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+}
+
+/* --- OpenWork component primitives (ow-*) --- */
+.ow-soft-shell {
+  border: 1px solid var(--dls-border);
+  background: var(--dls-surface);
+  border-radius: 2rem;
+  box-shadow: var(--dls-shell-shadow);
+}
+
+.ow-soft-card {
+  border: 1px solid var(--dls-border);
+  background: var(--dls-surface);
+  border-radius: 1.5rem;
+  box-shadow: var(--dls-card-shadow);
+}
+
+.ow-soft-card-quiet {
+  border: 1px solid var(--dls-border);
+  background: var(--dls-sidebar);
+  border-radius: 1.5rem;
+}
+
+.ow-button-primary {
+  display: inline-flex;
+  min-height: 48px;
+  align-items: center;
+  justify-content: center;
+  border-radius: 9999px;
+  background: var(--dls-accent);
+  color: white;
+  box-shadow: 0 8px 20px -16px rgba(var(--dls-accent-rgb), 0.45);
+}
+
+.ow-button-primary:hover:not(:disabled) {
+  background: var(--dls-accent-hover);
+}
+
+.ow-button-secondary {
+  display: inline-flex;
+  min-height: 48px;
+  align-items: center;
+  justify-content: center;
+  border-radius: 9999px;
+  border: 1px solid var(--dls-border);
+  background: var(--dls-surface);
+  color: var(--dls-text-primary);
+  box-shadow: var(--dls-card-shadow);
+}
+
+.ow-button-secondary:hover:not(:disabled) {
+  background: var(--dls-hover);
+}
+
+/* Shared sizing/transition for both button variants. */
+.ow-button-primary,
+.ow-button-secondary {
+  padding: 0.75rem 1.25rem;
+  font-size: 13px;
+  font-weight: 500;
+  transition: background-color 150ms ease, color 150ms ease, transform 150ms ease, opacity 150ms ease;
+}
+
+.ow-button-primary:active:not(:disabled),
+.ow-button-secondary:active:not(:disabled) {
+  transform: scale(0.99);
+}
+
+.ow-button-primary:disabled,
+.ow-button-secondary:disabled {
+  opacity: 0.5;
+}
+
+.ow-status-pill {
+  display: inline-flex;
+  align-items: center;
+  justify-content: center;
+  border-radius: 9999px;
+  padding: 0.25rem 0.625rem;
+  font-size: 10px;
+  font-weight: 700;
+  letter-spacing: 0.18em;
+  text-transform: uppercase;
+}
+
+.ow-status-pill-positive {
+  border: 1px solid var(--green-a6);
+  background: var(--green-3);
+  color: var(--green-11);
+}
+
+.ow-status-pill-warning {
+  border: 1px solid var(--amber-a6);
+  background: var(--amber-3);
+  color: var(--amber-11);
+}
+
+.ow-status-pill-neutral {
+  border: 1px solid var(--slate-6);
+  background: var(--slate-2);
+  color: var(--slate-11);
+}
+
+.ow-icon-tile {
+  display: inline-flex;
+  align-items: center;
+  justify-content: center;
+  border-radius: 0.85rem;
+  background: var(--slate-3);
+  color: var(--slate-12);
+}
+
+/* Same tile, secondary (slate-11) icon color. */
+.ow-icon-tile-muted {
+  display: inline-flex;
+  align-items: center;
+  justify-content: center;
+  border-radius: 0.85rem;
+  background: var(--slate-3);
+  color: var(--slate-11);
+}
+
+/* Borderless input: the 1px edge is an inset box-shadow so focus can layer a
+   ring on top without shifting layout. */
+.ow-input {
+  appearance: none;
+  width: 100%;
+  border: 0;
+  border-radius: 0.9rem;
+  background: var(--slate-1);
+  box-shadow: inset 0 0 0 1px var(--slate-6);
+  color: var(--dls-text-primary);
+}
+
+.ow-input::placeholder {
+  color: var(--slate-11);
+}
+
+.ow-input:focus {
+  outline: none;
+  box-shadow: inset 0 0 0 1px rgba(var(--dls-accent-rgb), 0.28), 0 0 0 3px rgba(var(--dls-accent-rgb), 0.08);
+}
+
+/* Global clickable elements pointer */
+button,
+[role="button"],
+a,
+input[type="submit"],
+input[type="button"],
+input[type="checkbox"],
+input[type="radio"],
+select {
+  cursor: pointer;
+}
+
+button:disabled,
+[role="button"][aria-disabled="true"],
+input:disabled,
+select:disabled {
+  cursor: not-allowed;
+}
+
+@utility animate-spin-slow {
+  animation: spin 3s linear infinite;
+}
+
+@keyframes soft-pulse {
+  0%,
+  100% {
+    transform: scale(1);
+    opacity: 0.4;
+  }
+  50% {
+    transform: scale(1.15);
+    opacity: 1;
+  }
+}
+
+@utility animate-soft-pulse {
+  animation: soft-pulse 1.5s cubic-bezier(0.4, 0, 0.6, 1) infinite;
+}
+
+/* Highlight animation for just-saved command */
+@keyframes command-highlight {
+  0% {
+    box-shadow: 0 0 0 0 var(--indigo-a9);
+    border-color: var(--indigo-a9);
+  }
+  50% {
+    box-shadow: 0 0 0 8px var(--indigo-a1);
+    border-color: var(--indigo-a7);
+  }
+  100% {
+    box-shadow: 0 0 0 0 var(--indigo-a1);
+    border-color: var(--slate-a3);
+  }
+}
+
+.command-just-saved {
+  animation: command-highlight 2s ease-out;
+  border-color: var(--indigo-a9);
+}
+
+@keyframes progress-shimmer {
+  0% {
+    transform: translateX(-100%);
+  }
+  100% {
+    transform: translateX(200%);
+  }
+}
+
+@utility animate-progress-shimmer {
+  animation: progress-shimmer 2s infinite linear;
+}
+
+/* Quiet, small dot ticker (`:: :: ::`). Each dot pulses briefly in sequence,
+   illuminating left-to-right like a subtle running light. Used for boot,
+   "awaiting first token", and any idle-but-alive hint. */
+@keyframes ow-dot-ticker {
+  0% {
+    background-color: rgba(var(--dls-secondary-rgb, 120, 120, 120), 0.22);
+    box-shadow: 0 0 0 rgba(var(--dls-accent-rgb), 0);
+  }
+  20% {
+    background-color: rgba(var(--dls-accent-rgb), 0.95);
+    box-shadow: 0 0 8px rgba(var(--dls-accent-rgb), 0.45);
+  }
+  55%,
+  100% {
+    background-color: rgba(var(--dls-secondary-rgb, 120, 120, 120), 0.22);
+    box-shadow: 0 0 0 rgba(var(--dls-accent-rgb), 0);
+  }
+}
+
+.ow-dot-ticker {
+  animation: ow-dot-ticker 1.4s cubic-bezier(0.4, 0, 0.2, 1) infinite;
+  background-color: rgba(var(--dls-secondary-rgb, 120, 120, 120), 0.22);
+}
+
+/* Respect reduced-motion: freeze the ticker at a steady accent tint. */
+@media (prefers-reduced-motion: reduce) {
+  .ow-dot-ticker {
+    animation: none;
+    background-color: rgba(var(--dls-accent-rgb), 0.6);
+  }
+}
+
+/* Dark-theme values for the shadcn variables (the [data-theme="dark"] block
+   above only covers the --dls-* tokens). */
+.dark,
+[data-theme="dark"] {
+  --background: var(--slate-1);
+  --foreground: var(--slate-12);
+  --card: var(--slate-2);
+  --card-foreground: var(--slate-12);
+  --popover: var(--slate-2);
+  --popover-foreground: var(--slate-12);
+  --primary-foreground: var(--slate-12);
+  --secondary: var(--slate-3);
+  --secondary-foreground: var(--slate-12);
+  --muted: var(--slate-3);
+  --muted-foreground: var(--slate-10);
+  --accent: var(--slate-3);
+  --accent-foreground: var(--slate-12);
+  --destructive: var(--red-9);
+  --border: var(--slate-3);
+  --input: var(--slate-6);
+  --ring: var(--slate-8);
+  --chart-1: var(--sky-9);
+  --chart-2: var(--blue-9);
+  --chart-3: var(--indigo-9);
+  --chart-4: var(--violet-9);
+  --chart-5: var(--purple-9);
+  --sidebar: var(--slate-2);
+  --sidebar-foreground: var(--slate-12);
+  --sidebar-primary: var(--blue-9);
+  --sidebar-primary-foreground: var(--slate-12);
+  --sidebar-accent: var(--slate-3);
+  --sidebar-accent-foreground: var(--slate-12);
+  --sidebar-border: var(--slate-6);
+  --sidebar-ring: var(--slate-8);
+}
+
+/* Expose the variables to Tailwind as theme tokens (bg-background, etc.). */
+@theme inline {
+  --font-sans: 'Geist Variable', sans-serif;
+  --font-heading: 'IBM Plex Sans Variable', sans-serif;
+  --color-sidebar-ring: var(--sidebar-ring);
+  --color-sidebar-border: var(--sidebar-border);
+  --color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
+  --color-sidebar-accent: var(--sidebar-accent);
+  --color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
+  --color-sidebar-primary: var(--sidebar-primary);
+  --color-sidebar-foreground: var(--sidebar-foreground);
+  --color-sidebar: var(--sidebar);
+  --color-chart-5: var(--chart-5);
+  --color-chart-4: var(--chart-4);
+  --color-chart-3: var(--chart-3);
+  --color-chart-2: var(--chart-2);
+  --color-chart-1: var(--chart-1);
+  --color-ring: var(--ring);
+  --color-input: var(--input);
+  --color-border: var(--border);
+  --color-destructive: var(--destructive);
+  --color-accent-foreground: var(--accent-foreground);
+  --color-accent: var(--accent);
+  --color-muted-foreground: var(--muted-foreground);
+  --color-muted: var(--muted);
+  --color-secondary-foreground: var(--secondary-foreground);
+  --color-secondary: var(--secondary);
+  --color-primary-foreground: var(--primary-foreground);
+  --color-primary: var(--dls-accent);
+  --color-popover-foreground: var(--popover-foreground);
+  --color-popover: var(--popover);
+  --color-card-foreground: var(--card-foreground);
+  --color-card: var(--card);
+  --color-foreground: var(--foreground);
+  --color-background: var(--background);
+  --radius-sm: calc(var(--radius) * 0.6);
+  --radius-md: calc(var(--radius) * 0.8);
+  --radius-lg: var(--radius);
+  --radius-xl: calc(var(--radius) * 1.4);
+  --radius-2xl: calc(var(--radius) * 1.8);
+  --radius-3xl: calc(var(--radius) * 2.2);
+  --radius-4xl: calc(var(--radius) * 2.6);
+}
+
+@layer base {
+  * {
+    @apply border-border outline-ring/50;
+  }
+  body {
+    @apply bg-background text-foreground;
+  }
+  html {
+    @apply font-sans;
+  }
+}
diff --git a/apps/app/src/app/lib/deep-link-bridge.ts
new file mode 100644
index 0000000000..a0b716dcf4
--- /dev/null
+++
b/apps/app/src/app/lib/deep-link-bridge.ts
@@ -0,0 +1,43 @@
+// Bridge between the native shell and the web app for deep links.
+// The shell queues URLs on window.__OPENWORK__.deepLinks and announces them
+// with a CustomEvent; the app drains the queue once it is ready.
+export const deepLinkBridgeEvent = "openwork:deep-link";
+export const nativeDeepLinkEvent = "openwork:deep-link-native";
+
+// Event detail payload: only the URLs pushed in that one call.
+export type DeepLinkBridgeDetail = {
+  urls: string[];
+};
+
+declare global {
+  interface Window {
+    __OPENWORK__?: {
+      deepLinks?: string[];
+    };
+  }
+}
+
+// Trim each URL and drop empty strings so consumers never see blanks.
+function normalizeDeepLinks(urls: readonly string[]): string[] {
+  return urls.map((url) => url.trim()).filter(Boolean);
+}
+
+// Append normalized URLs to the window-level pending queue and announce them
+// via `deepLinkBridgeEvent`. Returns the URLs actually queued; returns an
+// empty array (and dispatches nothing) when every input was blank.
+export function pushPendingDeepLinks(target: Window, urls: readonly string[]): string[] {
+  const normalized = normalizeDeepLinks(urls);
+  if (normalized.length === 0) {
+    return [];
+  }
+
+  target.__OPENWORK__ ??= {};
+  const pending = target.__OPENWORK__.deepLinks ?? [];
+  target.__OPENWORK__.deepLinks = [...pending, ...normalized];
+  target.dispatchEvent(
+    new CustomEvent(deepLinkBridgeEvent, {
+      detail: { urls: normalized },
+    }),
+  );
+  return normalized;
+}
+
+// Take (copy then clear) the pending queue. Safe when the queue was never
+// initialized; always returns a fresh array the caller may mutate.
+export function drainPendingDeepLinks(target: Window): string[] {
+  const pending = target.__OPENWORK__?.deepLinks ?? [];
+  if (target.__OPENWORK__) {
+    target.__OPENWORK__.deepLinks = [];
+  }
+  return [...pending];
+}
diff --git a/apps/app/src/app/lib/den-session-events.ts b/apps/app/src/app/lib/den-session-events.ts
new file mode 100644
index 0000000000..131841fac0
--- /dev/null
+++ b/apps/app/src/app/lib/den-session-events.ts
@@ -0,0 +1,41 @@
+import type { DenSettings, DenUser } from "./den";
+
+// Window-level event names broadcasting Den auth/session changes so
+// independent UI surfaces can react without sharing a store.
+export const denSessionUpdatedEvent = "openwork-den-session-updated";
+export const denSettingsChangedEvent = "openwork-den-settings-changed";
+
+// All fields optional: emitters include only what changed.
+export type DenSessionUpdatedDetail = {
+  status?: "success" | "error";
+  baseUrl?: string | null;
+  token?: string | null;
+  user?: DenUser | null;
+  email?: string | null;
+  message?: string | null;
+};
+
+// Fire `denSessionUpdatedEvent` on window; no-op where `window` is
+// undefined (SSR / non-browser contexts).
+export function dispatchDenSessionUpdated(detail: DenSessionUpdatedDetail) {
+  if (typeof window === "undefined") {
+    return;
+  }
+
+  window.dispatchEvent(
+    new CustomEvent(denSessionUpdatedEvent, {
+      detail,
+    }),
+  );
+}
+
+export type DenSettingsChangedDetail = {
+  settings: DenSettings;
+};
+
+// Fire `denSettingsChangedEvent` on window; no-op where `window` is undefined.
+export function dispatchDenSettingsChanged(detail: DenSettingsChangedDetail) {
+  if (typeof window === "undefined") {
+    return;
+  }
+
+  window.dispatchEvent(
+    new CustomEvent(denSettingsChangedEvent, {
+      detail,
+    }),
+  );
+}
diff --git a/apps/app/src/app/lib/den.ts b/apps/app/src/app/lib/den.ts
new file mode 100644
index 0000000000..db15c87abe
--- /dev/null
+++ b/apps/app/src/app/lib/den.ts
@@ -0,0 +1,1524 @@
+import {
+  normalizeDesktopConfig,
+  type DesktopConfig as SharedDesktopConfig,
+} from "@openwork/types/den/desktop-app-restrictions";
+
+// Re-export the shared schema under the local alias so React consumers
+// (e.g. the cloud domain's desktop-config provider) can import it alongside
+// the helpers they need. Solid references it internally only; the React
+// port wants it as part of the public surface of this module.
+export type { SharedDesktopConfig }; +export { normalizeDesktopConfig }; + +import { isDesktopDeployment } from "./openwork-deployment"; +import { + dispatchDenSettingsChanged, +} from "./den-session-events"; +import { + desktopFetch, + getDesktopBootstrapConfig as getDesktopBootstrapConfigFromShell, + setDesktopBootstrapConfig as setDesktopBootstrapConfigInShell, + type DesktopBootstrapConfig as ShellDesktopBootstrapConfig, +} from "./desktop"; +import { isDesktopRuntime } from "../utils"; +import type { DenOrgSkillCard } from "../types"; + +const STORAGE_BASE_URL = "openwork.den.baseUrl"; +const STORAGE_API_BASE_URL = "openwork.den.apiBaseUrl"; +const STORAGE_AUTH_TOKEN = "openwork.den.authToken"; +const STORAGE_ACTIVE_ORG_ID = "openwork.den.activeOrgId"; +const STORAGE_ACTIVE_ORG_SLUG = "openwork.den.activeOrgSlug"; +const STORAGE_ACTIVE_ORG_NAME = "openwork.den.activeOrgName"; +const DEFAULT_DEN_TIMEOUT_MS = 12_000; + +export const DEFAULT_DEN_AUTH_NAME = "OpenWork User"; +const BUILD_DEN_BASE_URL = + (typeof import.meta !== "undefined" && typeof import.meta.env?.VITE_DEN_BASE_URL === "string" + ? import.meta.env.VITE_DEN_BASE_URL + : "").trim() || "https://app.openworklabs.com"; +const BUILD_DEN_API_BASE_URL = + (typeof import.meta !== "undefined" && typeof import.meta.env?.VITE_DEN_API_BASE_URL === "string" + ? import.meta.env.VITE_DEN_API_BASE_URL + : "").trim() || undefined; +const BUILD_DEN_REQUIRE_SIGNIN = + (typeof import.meta !== "undefined" && typeof import.meta.env?.VITE_DEN_REQUIRE_SIGNIN === "string" + ? 
/^(1|true|yes|on)$/i.test(import.meta.env.VITE_DEN_REQUIRE_SIGNIN.trim()) + : false); + +export const DEFAULT_DEN_BASE_URL = BUILD_DEN_BASE_URL; + +export type DenSettings = { + baseUrl: string; + apiBaseUrl?: string; + authToken?: string | null; + activeOrgId?: string | null; + activeOrgSlug?: string | null; + activeOrgName?: string | null; +}; + +type DenBaseUrls = { + baseUrl: string; + apiBaseUrl: string; +}; + +export type DenBootstrapConfig = DenBaseUrls & { + requireSignin: boolean; +}; + +export type DenDesktopConfig = SharedDesktopConfig; + +export type DenUser = { + id: string; + email: string; + name: string | null; +}; + +export type DenOrgSummary = { + id: string; + name: string; + slug: string; + role: "owner" | "admin" | "member"; +}; + +export type DenWorkerSummary = { + workerId: string; + workerName: string; + status: string; + instanceUrl: string | null; + provider: string | null; + isMine: boolean; + createdAt: string | null; +}; + +export type DenWorkerTokens = { + clientToken: string | null; + ownerToken: string | null; + hostToken: string | null; + openworkUrl: string | null; + workspaceId: string | null; +}; + +export type DenOrgLlmProviderModel = { + id: string; + name: string; + config: Record; + createdAt: string | null; +}; + +export type DenOrgLlmProvider = { + id: string; + source: "models_dev" | "custom"; + providerId: string; + name: string; + providerConfig: Record; + hasApiKey: boolean; + models: DenOrgLlmProviderModel[]; + createdAt: string | null; + updatedAt: string | null; +}; + +export type DenOrgLlmProviderConnection = DenOrgLlmProvider & { + apiKey: string | null; +}; + +export type DenPluginConfigObjectType = "skill" | "agent" | "command" | "tool" | "mcp" | "hook" | "context" | "custom"; + +export type DenPluginConfigObjectVersion = { + id: string; + rawSourceText: string | null; + normalizedPayloadJson: Record | null; + sourceRevisionRef: string | null; + createdAt: string | null; +}; + +export type DenPluginConfigObject = 
{ + id: string; + objectType: DenPluginConfigObjectType; + title: string; + description: string | null; + currentFileName: string | null; + currentFileExtension: string | null; + currentRelativePath: string | null; + status: string; + updatedAt: string | null; + latestVersion: DenPluginConfigObjectVersion | null; +}; + +export type DenPluginMembership = { + id: string; + pluginId: string; + configObjectId: string; + configObject?: DenPluginConfigObject; +}; + +export type DenOrgPlugin = { + id: string; + name: string; + description: string | null; + status: string; + memberCount: number; + updatedAt: string | null; + componentCounts: Record; +}; + +export type DenOrgMarketplace = { + id: string; + name: string; + description: string | null; + status: string; + pluginCount: number; + updatedAt: string | null; +}; + +export type DenOrgMarketplaceResolved = { + marketplace: DenOrgMarketplace; + plugins: DenOrgPlugin[]; +}; + +export type DenOrgPluginResolved = { + plugin: DenOrgPlugin; + memberships: DenPluginMembership[]; +}; + +export type DenBillingPrice = { + amount: number | null; + currency: string | null; + recurringInterval: string | null; + recurringIntervalCount: number | null; +}; + +export type DenBillingSubscription = { + id: string; + status: string; + amount: number | null; + currency: string | null; + recurringInterval: string | null; + recurringIntervalCount: number | null; + currentPeriodStart: string | null; + currentPeriodEnd: string | null; + cancelAtPeriodEnd: boolean; + canceledAt: string | null; + endedAt: string | null; +}; + +export type DenBillingInvoice = { + id: string; + createdAt: string | null; + status: string; + totalAmount: number | null; + currency: string | null; + invoiceNumber: string | null; + invoiceUrl: string | null; +}; + +export type DenBillingSummary = { + featureGateEnabled: boolean; + hasActivePlan: boolean; + checkoutRequired: boolean; + checkoutUrl: string | null; + portalUrl: string | null; + price: DenBillingPrice | 
null; + subscription: DenBillingSubscription | null; + invoices: DenBillingInvoice[]; + productId: string | null; + benefitId: string | null; +}; + +type DenAuthResult = { + user: DenUser | null; + token: string | null; +}; + +export type DenDesktopHandoffExchange = { + user: DenUser | null; + token: string | null; +}; + +const defaultBootstrapBaseUrls = resolveDenBaseUrls({ + baseUrl: BUILD_DEN_BASE_URL, + apiBaseUrl: BUILD_DEN_API_BASE_URL, +}); + +let desktopBootstrapConfig: DenBootstrapConfig = { + ...defaultBootstrapBaseUrls, + requireSignin: BUILD_DEN_REQUIRE_SIGNIN, +}; + +export type DenAppVersionMetadata = { + minAppVersion: string; + latestAppVersion: string; +}; + +type RawJsonResponse = { + ok: boolean; + status: number; + json: T | null; +}; + +export class DenApiError extends Error { + status: number; + code: string; + details?: unknown; + + constructor(status: number, code: string, message: string, details?: unknown) { + super(message); + this.name = "DenApiError"; + this.status = status; + this.code = code; + this.details = details; + } +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function getDenAppVersionMetadata(payload: unknown): DenAppVersionMetadata | null { + if (!isRecord(payload)) return null; + + const latestAppVersion = + typeof payload.latestAppVersion === "string" ? payload.latestAppVersion.trim() : ""; + if (!latestAppVersion) return null; + + return { + minAppVersion: + typeof payload.minAppVersion === "string" ? payload.minAppVersion.trim() : "", + latestAppVersion, + }; +} + +export function normalizeDenDesktopConfig(payload: unknown): DenDesktopConfig { + return normalizeDesktopConfig(payload); +} + +export function normalizeDenBaseUrl(input: string | null | undefined): string | null { + const value = (input ?? 
"").trim(); + if (!value) return null; + try { + const url = new URL(value); + if (url.protocol !== "http:" && url.protocol !== "https:") { + return null; + } + return url.toString().replace(/\/+$/, ""); + } catch { + return null; + } +} + +function isWebAppHost(hostname: string): boolean { + const normalized = hostname.trim().toLowerCase(); + + if ( + normalized === "localhost" || + normalized === "0.0.0.0" || + normalized === "::1" || + normalized === "[::1]" || + /^127(?:\.\d{1,3}){3}$/.test(normalized) + ) { + return true; + } + + const ipv4Match = normalized.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/); + if (ipv4Match) { + const [first, second, third, fourth] = ipv4Match.slice(1).map(Number); + const octets = [first, second, third, fourth]; + if (octets.every((octet) => Number.isInteger(octet) && octet >= 0 && octet <= 255)) { + if ( + first === 10 || + first === 127 || + (first === 172 && second >= 16 && second <= 31) || + (first === 192 && second === 168) || + (first === 169 && second === 254) || + (first === 100 && second >= 64 && second <= 127) + ) { + return true; + } + } + } + + return normalized === "app.openworklabs.com" || normalized === "app.openwork.software" || normalized.startsWith("app."); +} + +function stripDenApiBasePath(input: string | null | undefined): string | null { + const normalized = normalizeDenBaseUrl(input); + if (!normalized) return null; + + try { + const url = new URL(normalized); + const pathname = url.pathname.replace(/\/+$/, ""); + const suffix = "/api/den"; + if (!pathname.toLowerCase().endsWith(suffix)) { + return normalized; + } + + const nextPathname = pathname.slice(0, -suffix.length) || "/"; + url.pathname = nextPathname; + return url.toString().replace(/\/+$/, ""); + } catch { + return normalized; + } +} + +function ensureDenApiBasePath(input: string | null | undefined): string | null { + const normalized = normalizeDenBaseUrl(input); + if (!normalized) return null; + + try { + const url = new URL(normalized); 
+ const pathname = url.pathname.replace(/\/+$/, ""); + if (pathname.toLowerCase().endsWith("/api/den")) { + return normalized; + } + url.pathname = `${pathname}/api/den`.replace(/\/+/g, "/"); + return url.toString().replace(/\/+$/, ""); + } catch { + return normalized; + } +} + +function deriveDenApiBaseUrl(input: string | null | undefined): string { + const normalized = normalizeDenBaseUrl(input) ?? DEFAULT_DEN_BASE_URL; + + try { + const url = new URL(normalized); + const pathname = url.pathname.replace(/\/+$/, ""); + if (pathname.toLowerCase().endsWith("/api/den")) { + return normalized; + } + if (isWebAppHost(url.hostname)) { + return ensureDenApiBasePath(normalized) ?? normalized; + } + } catch { + return normalized; + } + + return normalized; +} + +export function resolveDenBaseUrls(input: { baseUrl?: string | null; apiBaseUrl?: string | null } | string | null | undefined): DenBaseUrls { + const rawBaseUrl = typeof input === "string" ? input : input?.baseUrl; + const rawApiBaseUrl = typeof input === "string" ? null : input?.apiBaseUrl; + const normalizedBaseUrl = normalizeDenBaseUrl(rawBaseUrl); + const normalizedApiBaseUrl = normalizeDenBaseUrl(rawApiBaseUrl); + const seedUrl = normalizedBaseUrl ?? normalizedApiBaseUrl ?? DEFAULT_DEN_BASE_URL; + + return { + baseUrl: stripDenApiBasePath(normalizedBaseUrl ?? seedUrl) ?? DEFAULT_DEN_BASE_URL, + apiBaseUrl: normalizedApiBaseUrl ?? 
deriveDenApiBaseUrl(seedUrl), + }; +} + +function resolveDenBootstrapConfig( + input: { baseUrl: string; apiBaseUrl?: string | null; requireSignin?: boolean | null }, +): DenBootstrapConfig { + return { + ...resolveDenBaseUrls(input), + requireSignin: input.requireSignin === true, + }; +} + +function syncBootstrapSettingsToLocalStorage(config: DenBootstrapConfig) { + if (typeof window === "undefined") { + return; + } + + window.localStorage.setItem(STORAGE_BASE_URL, config.baseUrl); + window.localStorage.setItem(STORAGE_API_BASE_URL, config.apiBaseUrl); +} + +function getPendingBootstrapConfig(next: DenSettings): DenBootstrapConfig | null { + if (next.baseUrl === undefined && next.apiBaseUrl === undefined) { + return null; + } + + const previous = readDenBootstrapConfig(); + return resolveDenBootstrapConfig({ + baseUrl: next.baseUrl ?? previous.baseUrl, + apiBaseUrl: next.apiBaseUrl ?? previous.apiBaseUrl, + requireSignin: previous.requireSignin, + }); +} + +function applyDesktopBootstrapConfig(config: DenBootstrapConfig) { + desktopBootstrapConfig = config; + syncBootstrapSettingsToLocalStorage(config); +} + +export function readDenBootstrapConfig(): DenBootstrapConfig { + return desktopBootstrapConfig; +} + +export async function initializeDenBootstrapConfig(): Promise { + if (!isDesktopRuntime()) { + desktopBootstrapConfig = resolveDenBootstrapConfig({ + baseUrl: BUILD_DEN_BASE_URL, + apiBaseUrl: BUILD_DEN_API_BASE_URL, + requireSignin: BUILD_DEN_REQUIRE_SIGNIN, + }); + return desktopBootstrapConfig; + } + + try { + const bootstrap = await getDesktopBootstrapConfigFromShell(); + applyDesktopBootstrapConfig(resolveDenBootstrapConfig(bootstrap)); + } catch { + desktopBootstrapConfig = resolveDenBootstrapConfig({ + baseUrl: BUILD_DEN_BASE_URL, + apiBaseUrl: BUILD_DEN_API_BASE_URL, + requireSignin: BUILD_DEN_REQUIRE_SIGNIN, + }); + syncBootstrapSettingsToLocalStorage(desktopBootstrapConfig); + } + + return desktopBootstrapConfig; +} + +export async function 
setDenBootstrapConfig( + next: ShellDesktopBootstrapConfig, +): Promise { + const normalized = resolveDenBootstrapConfig(next); + + if (isDesktopRuntime()) { + const persisted = await setDesktopBootstrapConfigInShell({ + baseUrl: normalized.baseUrl, + apiBaseUrl: normalized.apiBaseUrl, + requireSignin: normalized.requireSignin, + }); + applyDesktopBootstrapConfig(resolveDenBootstrapConfig(persisted)); + } else { + applyDesktopBootstrapConfig(normalized); + } + + dispatchDenSettingsChanged({ + settings: readDenSettings(), + }); + + return readDenBootstrapConfig(); +} + +export function buildDenAuthUrl(baseUrl: string, mode: "sign-in" | "sign-up"): string { + const target = new URL(resolveDenBaseUrls(baseUrl).baseUrl); + target.searchParams.set("mode", mode); + if (isDesktopDeployment()) { + target.searchParams.set("desktopAuth", "1"); + target.searchParams.set("desktopScheme", "openwork"); + } + return target.toString(); +} + +function resolveRequestBaseUrl(baseUrls: DenBaseUrls, path: string): string { + return path.startsWith("/api/") ? baseUrls.baseUrl : baseUrls.apiBaseUrl; +} + +export function readDenSettings(): DenSettings { + if (typeof window === "undefined") { + return { + ...readDenBootstrapConfig(), + authToken: null, + activeOrgId: null, + activeOrgSlug: null, + activeOrgName: null, + }; + } + + const baseUrls = resolveDenBaseUrls({ + baseUrl: window.localStorage.getItem(STORAGE_BASE_URL) ?? readDenBootstrapConfig().baseUrl, + apiBaseUrl: window.localStorage.getItem(STORAGE_API_BASE_URL) ?? readDenBootstrapConfig().apiBaseUrl, + }); + + return { + ...baseUrls, + authToken: (window.localStorage.getItem(STORAGE_AUTH_TOKEN) ?? "").trim() || null, + activeOrgId: (window.localStorage.getItem(STORAGE_ACTIVE_ORG_ID) ?? "").trim() || null, + activeOrgSlug: (window.localStorage.getItem(STORAGE_ACTIVE_ORG_SLUG) ?? "").trim() || null, + activeOrgName: (window.localStorage.getItem(STORAGE_ACTIVE_ORG_NAME) ?? 
"").trim() || null, + }; +} + +export function writeDenSettings(next: DenSettings, options?: { persistBootstrap?: boolean }) { + if (typeof window === "undefined") { + return; + } + + const pendingBootstrap = getPendingBootstrapConfig(next); + const previous = readDenSettings(); + const resolved = resolveDenBaseUrls(next); + const previousResolved = resolveDenBaseUrls(previous); + const baseUrl = resolved.baseUrl; + const apiBaseUrl = next.apiBaseUrl !== undefined + ? resolved.apiBaseUrl + : previousResolved.baseUrl === resolved.baseUrl + ? previous.apiBaseUrl ?? resolved.apiBaseUrl + : resolved.apiBaseUrl; + const authToken = next.authToken?.trim() ?? ""; + const activeOrgId = next.activeOrgId?.trim() ?? ""; + const activeOrgSlug = next.activeOrgSlug?.trim() ?? ""; + const activeOrgName = next.activeOrgName?.trim() ?? ""; + + if ( + previous.baseUrl === baseUrl && + (previous.apiBaseUrl ?? "") === apiBaseUrl && + (previous.authToken ?? "") === authToken && + (previous.activeOrgId ?? "") === activeOrgId && + (previous.activeOrgSlug ?? "") === activeOrgSlug && + (previous.activeOrgName ?? 
"") === activeOrgName + ) { + return; + } + + window.localStorage.setItem(STORAGE_BASE_URL, baseUrl); + window.localStorage.setItem(STORAGE_API_BASE_URL, apiBaseUrl); + if (authToken) { + window.localStorage.setItem(STORAGE_AUTH_TOKEN, authToken); + } else { + window.localStorage.removeItem(STORAGE_AUTH_TOKEN); + } + + if (activeOrgId) { + window.localStorage.setItem(STORAGE_ACTIVE_ORG_ID, activeOrgId); + } else { + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_ID); + } + + if (activeOrgSlug) { + window.localStorage.setItem(STORAGE_ACTIVE_ORG_SLUG, activeOrgSlug); + } else { + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_SLUG); + } + + if (activeOrgName) { + window.localStorage.setItem(STORAGE_ACTIVE_ORG_NAME, activeOrgName); + } else { + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_NAME); + } + + if (options?.persistBootstrap !== false && pendingBootstrap) { + const currentBootstrap = readDenBootstrapConfig(); + if ( + pendingBootstrap.baseUrl !== currentBootstrap.baseUrl || + pendingBootstrap.apiBaseUrl !== currentBootstrap.apiBaseUrl + ) { + void setDenBootstrapConfig({ + baseUrl: pendingBootstrap.baseUrl, + apiBaseUrl: pendingBootstrap.apiBaseUrl, + requireSignin: currentBootstrap.requireSignin, + }).catch(() => undefined); + } + } + + dispatchDenSettingsChanged({ + settings: readDenSettings(), + }); +} + +export function clearDenSession(options?: { includeBaseUrls?: boolean }) { + if (typeof window === "undefined") { + return; + } + + if (options?.includeBaseUrls) { + window.localStorage.removeItem(STORAGE_BASE_URL); + window.localStorage.removeItem(STORAGE_API_BASE_URL); + } + + window.localStorage.removeItem(STORAGE_AUTH_TOKEN); + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_ID); + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_SLUG); + window.localStorage.removeItem(STORAGE_ACTIVE_ORG_NAME); + + dispatchDenSettingsChanged({ + settings: readDenSettings(), + }); +} + +export async function ensureDenActiveOrganization(options?: { 
forceServerSync?: boolean }) { + const settings = readDenSettings(); + const token = settings.authToken?.trim() ?? ""; + if (!token) { + return null; + } + + const client = createDenClient({ + baseUrl: settings.baseUrl, + apiBaseUrl: settings.apiBaseUrl, + token, + }); + + const response = await client.listOrgs(); + const targetOrg = + response.orgs.find((org) => org.id === response.activeOrgId) ?? + response.orgs.find((org) => org.slug === response.activeOrgSlug) ?? + response.orgs[0] ?? + null; + + if (!targetOrg) { + writeDenSettings({ + ...settings, + activeOrgId: null, + activeOrgSlug: null, + activeOrgName: null, + }, { persistBootstrap: false }); + return null; + } + + if ( + options?.forceServerSync && + (!response.activeOrgId || response.activeOrgId !== targetOrg.id) + ) { + await client.setActiveOrganization({ organizationId: targetOrg.id }); + } + + writeDenSettings({ + ...settings, + activeOrgId: targetOrg.id, + activeOrgSlug: targetOrg.slug, + activeOrgName: targetOrg.name, + }, { persistBootstrap: false }); + + return targetOrg; +} + +function getErrorMessage(payload: unknown, fallback: string): string { + if (typeof payload === "string" && payload.trim()) { + return payload.trim(); + } + + if (!isRecord(payload)) { + return fallback; + } + + if (typeof payload.message === "string" && payload.message.trim()) { + return payload.message.trim(); + } + + if (typeof payload.error === "string" && payload.error.trim()) { + return payload.error.trim(); + } + + return fallback; +} + +function getUser(payload: unknown): DenUser | null { + if (!isRecord(payload) || !isRecord(payload.user)) { + return null; + } + + const user = payload.user; + if (typeof user.id !== "string" || typeof user.email !== "string") { + return null; + } + + return { + id: user.id, + email: user.email, + name: typeof user.name === "string" ? 
user.name : null, + }; +} + +function getToken(payload: unknown): string | null { + if (!isRecord(payload) || typeof payload.token !== "string") { + return null; + } + return payload.token.trim() || null; +} + +function getOrgList(payload: unknown): DenOrgSummary[] { + if (!isRecord(payload) || !Array.isArray(payload.orgs)) { + return []; + } + + return payload.orgs + .map((entry) => { + if (!isRecord(entry)) return null; + if ( + typeof entry.id !== "string" || + typeof entry.name !== "string" || + typeof entry.slug !== "string" || + (entry.role !== "owner" && entry.role !== "admin" && entry.role !== "member") + ) { + return null; + } + + return { + id: entry.id, + name: entry.name, + slug: entry.slug, + role: entry.role, + } satisfies DenOrgSummary; + }) + .filter((entry): entry is DenOrgSummary => Boolean(entry)); +} + +function getWorkers(payload: unknown): DenWorkerSummary[] { + if (!isRecord(payload) || !Array.isArray(payload.workers)) { + return []; + } + + return payload.workers + .map((entry) => { + if (!isRecord(entry)) return null; + const instance = isRecord(entry.instance) ? entry.instance : null; + if (typeof entry.id !== "string" || typeof entry.name !== "string") { + return null; + } + return { + workerId: entry.id, + workerName: entry.name, + status: typeof entry.status === "string" ? entry.status : "unknown", + instanceUrl: instance && typeof instance.url === "string" ? instance.url : null, + provider: instance && typeof instance.provider === "string" ? instance.provider : null, + isMine: Boolean(entry.isMine), + createdAt: typeof entry.createdAt === "string" ? entry.createdAt : null, + } satisfies DenWorkerSummary; + }) + .filter((entry): entry is DenWorkerSummary => Boolean(entry)); +} + +function getWorkerTokens(payload: unknown): DenWorkerTokens | null { + if (!isRecord(payload) || !isRecord(payload.tokens)) { + return null; + } + + const tokens = payload.tokens; + const connect = isRecord(payload.connect) ? 
payload.connect : null; + return { + clientToken: typeof tokens.client === "string" ? tokens.client : null, + ownerToken: typeof tokens.owner === "string" ? tokens.owner : null, + hostToken: typeof tokens.host === "string" ? tokens.host : null, + openworkUrl: connect && typeof connect.openworkUrl === "string" ? connect.openworkUrl : null, + workspaceId: connect && typeof connect.workspaceId === "string" ? connect.workspaceId : null, + }; +} + +function parseDenOrgSkillRow(record: Record, hubName: string | null): DenOrgSkillCard | null { + if (typeof record.id !== "string" || typeof record.title !== "string" || typeof record.skillText !== "string") { + return null; + } + const description = typeof record.description === "string" ? record.description : null; + const shared = record.shared === "org" || record.shared === "public" ? record.shared : null; + return { + id: record.id, + title: record.title, + description, + skillText: record.skillText, + hubName, + shared, + updatedAt: typeof record.updatedAt === "string" ? record.updatedAt : null, + }; +} + +function getDenOrgSkillsFromPayload(payload: unknown): DenOrgSkillCard[] { + if (!isRecord(payload) || !Array.isArray(payload.skills)) { + return []; + } + return payload.skills + .map((entry) => (isRecord(entry) ? parseDenOrgSkillRow(entry, null) : null)) + .filter((entry): entry is DenOrgSkillCard => entry !== null); +} + +export type DenOrgSkillHub = { id: string; name: string; skills: DenOrgSkillCard[] }; + +function parseOrgSkillHubEntry(hub: Record): DenOrgSkillHub | null { + const hubId = hub.id; + const hubName = hub.name; + const hubSkills = hub.skills; + if (typeof hubId !== "string" || typeof hubName !== "string" || !Array.isArray(hubSkills)) { + return null; + } + const skills = hubSkills + .map((s) => (isRecord(s) ? 
parseDenOrgSkillRow(s, hubName) : null)) + .filter((s): s is DenOrgSkillCard => s !== null); + return { id: hubId, name: hubName, skills }; +} + +function getDenOrgSkillHubsFromPayload(payload: unknown): DenOrgSkillHub[] { + if (!isRecord(payload) || !Array.isArray(payload.skillHubs)) { + return []; + } + return payload.skillHubs + .map((entry) => (isRecord(entry) ? parseOrgSkillHubEntry(entry) : null)) + .filter((e): e is DenOrgSkillHub => e !== null); +} + +function parseDenOrgLlmProviderModel(value: unknown): DenOrgLlmProviderModel | null { + if (!isRecord(value) || typeof value.id !== "string" || typeof value.name !== "string") { + return null; + } + + return { + id: value.id, + name: value.name, + config: isRecord(value.config) ? value.config : {}, + createdAt: typeof value.createdAt === "string" ? value.createdAt : null, + }; +} + +function parseDenOrgLlmProvider(value: unknown): DenOrgLlmProvider | null { + if ( + !isRecord(value) || + typeof value.id !== "string" || + typeof value.providerId !== "string" || + typeof value.name !== "string" || + (value.source !== "models_dev" && value.source !== "custom") + ) { + return null; + } + + return { + id: value.id, + source: value.source, + providerId: value.providerId, + name: value.name, + providerConfig: isRecord(value.providerConfig) ? value.providerConfig : {}, + hasApiKey: value.hasApiKey === true, + models: Array.isArray(value.models) + ? value.models.map(parseDenOrgLlmProviderModel).filter((entry): entry is DenOrgLlmProviderModel => entry !== null) + : [], + createdAt: typeof value.createdAt === "string" ? value.createdAt : null, + updatedAt: typeof value.updatedAt === "string" ? 
value.updatedAt : null, + }; +} + +function getDenOrgLlmProviders(payload: unknown): DenOrgLlmProvider[] { + if (!isRecord(payload) || !Array.isArray(payload.llmProviders)) { + return []; + } + + return payload.llmProviders + .map(parseDenOrgLlmProvider) + .filter((entry): entry is DenOrgLlmProvider => entry !== null); +} + +function getDenOrgLlmProviderConnection(payload: unknown): DenOrgLlmProviderConnection | null { + if (!isRecord(payload) || !payload.llmProvider) { + return null; + } + + const provider = parseDenOrgLlmProvider(payload.llmProvider); + if (!provider || !isRecord(payload.llmProvider)) { + return null; + } + + return { + ...provider, + apiKey: typeof payload.llmProvider.apiKey === "string" ? payload.llmProvider.apiKey : null, + }; +} + +function parsePluginConfigObjectType(value: unknown): DenPluginConfigObjectType | null { + return value === "skill" || value === "agent" || value === "command" || value === "tool" || + value === "mcp" || value === "hook" || value === "context" || value === "custom" + ? value + : null; +} + +function parsePluginConfigObjectVersion(value: unknown): DenPluginConfigObjectVersion | null { + if (!isRecord(value) || typeof value.id !== "string") return null; + return { + id: value.id, + rawSourceText: typeof value.rawSourceText === "string" ? value.rawSourceText : null, + normalizedPayloadJson: isRecord(value.normalizedPayloadJson) ? value.normalizedPayloadJson : null, + sourceRevisionRef: typeof value.sourceRevisionRef === "string" ? value.sourceRevisionRef : null, + createdAt: typeof value.createdAt === "string" ? 
value.createdAt : null, + }; +} + +function parsePluginConfigObject(value: unknown): DenPluginConfigObject | null { + if (!isRecord(value) || typeof value.id !== "string" || typeof value.title !== "string") return null; + const objectType = parsePluginConfigObjectType(value.objectType); + if (!objectType) return null; + return { + id: value.id, + objectType, + title: value.title, + description: typeof value.description === "string" ? value.description : null, + currentFileName: typeof value.currentFileName === "string" ? value.currentFileName : null, + currentFileExtension: typeof value.currentFileExtension === "string" ? value.currentFileExtension : null, + currentRelativePath: typeof value.currentRelativePath === "string" ? value.currentRelativePath : null, + status: typeof value.status === "string" ? value.status : "active", + updatedAt: typeof value.updatedAt === "string" ? value.updatedAt : null, + latestVersion: parsePluginConfigObjectVersion(value.latestVersion), + }; +} + +function parseOrgPlugin(value: unknown): DenOrgPlugin | null { + if (!isRecord(value) || typeof value.id !== "string" || typeof value.name !== "string") return null; + const counts = isRecord(value.componentCounts) + ? Object.fromEntries( + Object.entries(value.componentCounts).filter((entry): entry is [string, number] => + typeof entry[0] === "string" && typeof entry[1] === "number" && Number.isFinite(entry[1]) && entry[1] >= 0, + ), + ) + : {}; + return { + id: value.id, + name: value.name, + description: typeof value.description === "string" ? value.description : null, + status: typeof value.status === "string" ? value.status : "active", + memberCount: typeof value.memberCount === "number" && Number.isFinite(value.memberCount) ? value.memberCount : 0, + updatedAt: typeof value.updatedAt === "string" ? 
value.updatedAt : null, + componentCounts: counts, + }; +} + +function parseOrgMarketplace(value: unknown): DenOrgMarketplace | null { + if (!isRecord(value) || typeof value.id !== "string" || typeof value.name !== "string") return null; + return { + id: value.id, + name: value.name, + description: typeof value.description === "string" ? value.description : null, + status: typeof value.status === "string" ? value.status : "active", + pluginCount: typeof value.pluginCount === "number" && Number.isFinite(value.pluginCount) ? value.pluginCount : 0, + updatedAt: typeof value.updatedAt === "string" ? value.updatedAt : null, + }; +} + +function parsePluginMembership(value: unknown): DenPluginMembership | null { + if (!isRecord(value) || typeof value.id !== "string" || typeof value.pluginId !== "string" || typeof value.configObjectId !== "string") { + return null; + } + const configObject = parsePluginConfigObject(value.configObject); + return { + id: value.id, + pluginId: value.pluginId, + configObjectId: value.configObjectId, + ...(configObject ? { configObject } : {}), + }; +} + +function getOrgMarketplaces(payload: unknown): DenOrgMarketplace[] { + if (!isRecord(payload) || !Array.isArray(payload.items)) return []; + return payload.items.map(parseOrgMarketplace).filter((entry): entry is DenOrgMarketplace => entry !== null); +} + +function getOrgMarketplaceResolved(payload: unknown): DenOrgMarketplaceResolved | null { + if (!isRecord(payload) || !isRecord(payload.item)) return null; + const marketplace = parseOrgMarketplace(payload.item.marketplace); + if (!marketplace || !Array.isArray(payload.item.plugins)) return null; + return { + marketplace, + plugins: payload.item.plugins.map(parseOrgPlugin).filter((entry): entry is DenOrgPlugin => entry !== null), + }; +} + +function getOrgPluginResolved(plugin: DenOrgPlugin, payload: unknown): DenOrgPluginResolved { + const memberships = isRecord(payload) && Array.isArray(payload.items) + ? 
payload.items.map(parsePluginMembership).filter((entry): entry is DenPluginMembership => entry !== null) + : []; + return { plugin, memberships }; +} + +function getBillingPrice(value: unknown): DenBillingPrice | null { + if (!isRecord(value)) { + return null; + } + + return { + amount: typeof value.amount === "number" ? value.amount : null, + currency: typeof value.currency === "string" ? value.currency : null, + recurringInterval: typeof value.recurringInterval === "string" ? value.recurringInterval : null, + recurringIntervalCount: typeof value.recurringIntervalCount === "number" ? value.recurringIntervalCount : null, + }; +} + +function getBillingSubscription(value: unknown): DenBillingSubscription | null { + if (!isRecord(value) || typeof value.id !== "string") { + return null; + } + + return { + id: value.id, + status: typeof value.status === "string" ? value.status : "unknown", + amount: typeof value.amount === "number" ? value.amount : null, + currency: typeof value.currency === "string" ? value.currency : null, + recurringInterval: typeof value.recurringInterval === "string" ? value.recurringInterval : null, + recurringIntervalCount: typeof value.recurringIntervalCount === "number" ? value.recurringIntervalCount : null, + currentPeriodStart: typeof value.currentPeriodStart === "string" ? value.currentPeriodStart : null, + currentPeriodEnd: typeof value.currentPeriodEnd === "string" ? value.currentPeriodEnd : null, + cancelAtPeriodEnd: value.cancelAtPeriodEnd === true, + canceledAt: typeof value.canceledAt === "string" ? value.canceledAt : null, + endedAt: typeof value.endedAt === "string" ? value.endedAt : null, + }; +} + +function getBillingInvoice(value: unknown): DenBillingInvoice | null { + if (!isRecord(value) || typeof value.id !== "string") { + return null; + } + + return { + id: value.id, + createdAt: typeof value.createdAt === "string" ? value.createdAt : null, + status: typeof value.status === "string" ? 
value.status : "unknown", + totalAmount: typeof value.totalAmount === "number" ? value.totalAmount : null, + currency: typeof value.currency === "string" ? value.currency : null, + invoiceNumber: typeof value.invoiceNumber === "string" ? value.invoiceNumber : null, + invoiceUrl: typeof value.invoiceUrl === "string" ? value.invoiceUrl : null, + }; +} + +export type DenOrgSkillHubSummary = { + id: string; + name: string; + canManage: boolean; +}; + +function getOrgSkillHubSummaries(payload: unknown): DenOrgSkillHubSummary[] { + if (!isRecord(payload) || !Array.isArray(payload.skillHubs)) { + return []; + } + + return payload.skillHubs + .map((entry) => { + if (!isRecord(entry)) return null; + if (typeof entry.id !== "string" || typeof entry.name !== "string" || typeof entry.canManage !== "boolean") { + return null; + } + return { id: entry.id, name: entry.name, canManage: entry.canManage }; + }) + .filter((entry): entry is DenOrgSkillHubSummary => Boolean(entry)); +} + +function getCreatedOrgSkillId(payload: unknown): string | null { + if (!isRecord(payload) || !isRecord(payload.skill)) return null; + return typeof payload.skill.id === "string" ? payload.skill.id : null; +} + +function getBillingSummary(payload: unknown): DenBillingSummary | null { + if (!isRecord(payload) || !isRecord(payload.billing)) { + return null; + } + + const billing = payload.billing; + if ( + typeof billing.featureGateEnabled !== "boolean" || + typeof billing.hasActivePlan !== "boolean" || + typeof billing.checkoutRequired !== "boolean" + ) { + return null; + } + + return { + featureGateEnabled: billing.featureGateEnabled, + hasActivePlan: billing.hasActivePlan, + checkoutRequired: billing.checkoutRequired, + checkoutUrl: typeof billing.checkoutUrl === "string" ? billing.checkoutUrl : null, + portalUrl: typeof billing.portalUrl === "string" ? 
billing.portalUrl : null, + price: getBillingPrice(billing.price), + subscription: getBillingSubscription(billing.subscription), + invoices: Array.isArray(billing.invoices) + ? billing.invoices.map((item) => getBillingInvoice(item)).filter((item): item is DenBillingInvoice => item !== null) + : [], + productId: typeof billing.productId === "string" ? billing.productId : null, + benefitId: typeof billing.benefitId === "string" ? billing.benefitId : null, + }; +} + +const resolveFetch = () => (isDesktopRuntime() ? desktopFetch : globalThis.fetch); + +type FetchLike = (input: RequestInfo | URL, init?: RequestInit) => Promise; + +async function fetchWithTimeout(fetchImpl: FetchLike, url: string, init: RequestInit, timeoutMs: number) { + if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) { + return fetchImpl(url, init); + } + + const controller = typeof AbortController !== "undefined" ? new AbortController() : null; + const signal = controller?.signal; + const initWithSignal = signal && !init.signal ? { ...init, signal } : init; + + let timeoutId: ReturnType | null = null; + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + try { + controller?.abort(); + } catch { + // ignore + } + reject(new Error("Request timed out.")); + }, timeoutMs); + }); + + try { + return await Promise.race([fetchImpl(url, initWithSignal), timeoutPromise]); + } finally { + if (timeoutId) clearTimeout(timeoutId); + } +} + +async function requestJsonRaw( + input: string | DenBaseUrls, + path: string, + options: { method?: string; token?: string | null; body?: unknown; timeoutMs?: number } = {}, +): Promise> { + const baseUrls = typeof input === "string" ? resolveDenBaseUrls(input) : input; + const url = `${resolveRequestBaseUrl(baseUrls, path)}${path}`; + const headers: Record = { Accept: "application/json" }; + const token = options.token?.trim() ?? 
""; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + if (options.body !== undefined) { + headers["Content-Type"] = "application/json"; + } + + const response = await fetchWithTimeout( + resolveFetch(), + url, + { + method: options.method ?? "GET", + headers, + body: options.body === undefined ? undefined : JSON.stringify(options.body), + credentials: "include", + }, + options.timeoutMs ?? DEFAULT_DEN_TIMEOUT_MS, + ); + + const text = await response.text(); + let json: T | null = null; + try { + json = text ? (JSON.parse(text) as T) : null; + } catch { + json = null; + } + return { ok: response.ok, status: response.status, json }; +} + +async function requestJson( + input: string | DenBaseUrls, + path: string, + options: { method?: string; token?: string | null; body?: unknown; timeoutMs?: number } = {}, +): Promise { + const raw = await requestJsonRaw(input, path, options); + if (!raw.ok) { + const payload = raw.json; + const code = isRecord(payload) && typeof payload.error === "string" ? payload.error : "request_failed"; + const message = getErrorMessage(payload, `Request failed with ${raw.status}.`); + throw new DenApiError(raw.status, code, message, isRecord(payload) ? payload.details : undefined); + } + return raw.json as T; +} + +async function ensureActiveOrganization( + baseUrls: DenBaseUrls, + token: string | null, + input: { organizationId?: string | null; organizationSlug?: string | null }, +) { + const organizationId = input.organizationId?.trim() ?? ""; + const organizationSlug = input.organizationSlug?.trim() ?? 
""; + if (!token || (!organizationId && !organizationSlug)) { + return; + } + + await requestJson(baseUrls, "/api/auth/organization/set-active", { + method: "POST", + token, + body: { + organizationId: organizationId || undefined, + organizationSlug: organizationSlug || undefined, + }, + }); +} + +export function createDenClient(options: { baseUrl: string; apiBaseUrl?: string | null; token?: string | null }) { + const baseUrls = resolveDenBaseUrls({ + baseUrl: options.baseUrl, + apiBaseUrl: options.apiBaseUrl, + }); + const token = options.token?.trim() ?? null; + + return { + async setActiveOrganization(input: { organizationId?: string | null; organizationSlug?: string | null }): Promise { + await ensureActiveOrganization(baseUrls, token, input); + }, + + async signInEmail(email: string, password: string): Promise { + const payload = await requestJson(baseUrls, "/api/auth/sign-in/email", { + method: "POST", + body: { + email: email.trim(), + password, + }, + }); + return { user: getUser(payload), token: getToken(payload) }; + }, + + async signUpEmail(email: string, password: string): Promise { + const payload = await requestJson(baseUrls, "/api/auth/sign-up/email", { + method: "POST", + body: { + name: DEFAULT_DEN_AUTH_NAME, + email: email.trim(), + password, + }, + }); + return { user: getUser(payload), token: getToken(payload) }; + }, + + async signOut() { + await requestJsonRaw(baseUrls, "/api/auth/sign-out", { + method: "POST", + token, + body: {}, + }); + }, + + async getSession(): Promise { + const payload = await requestJson(baseUrls, "/v1/me", { + method: "GET", + token, + }); + const user = getUser(payload); + if (!user) { + throw new DenApiError(500, "invalid_session_payload", "Session response did not include a user."); + } + return user; + }, + + async getAppVersionMetadata(): Promise { + const payload = await requestJson(baseUrls, "/v1/app-version", { + method: "GET", + }); + const appVersionMetadata = getDenAppVersionMetadata(payload); + if 
(!appVersionMetadata) { + throw new DenApiError(500, "invalid_app_version_payload", "App version response was missing version details."); + } + return appVersionMetadata; + }, + + async getDesktopConfig(): Promise { + const payload = await requestJson(baseUrls, "/v1/me/desktop-config", { + method: "GET", + token, + }); + return normalizeDenDesktopConfig(payload); + }, + + async exchangeDesktopHandoff(grant: string): Promise { + const payload = await requestJson(baseUrls, "/v1/auth/desktop-handoff/exchange", { + method: "POST", + body: { grant }, + }); + return { user: getUser(payload), token: getToken(payload) }; + }, + + async listOrgs(): Promise<{ orgs: DenOrgSummary[]; activeOrgId: string | null; activeOrgSlug: string | null; defaultOrgId: string | null }> { + const payload = await requestJson(baseUrls, "/v1/me/orgs", { + method: "GET", + token, + }); + + const activeOrgId = isRecord(payload) && typeof payload.activeOrgId === "string" + ? payload.activeOrgId + : null; + const activeOrgSlug = isRecord(payload) && typeof payload.activeOrgSlug === "string" + ? 
payload.activeOrgSlug + : null; + + return { + orgs: getOrgList(payload), + activeOrgId, + activeOrgSlug, + defaultOrgId: activeOrgId, + }; + }, + + async listWorkers(orgId: string, limit = 20): Promise { + const params = new URLSearchParams(); + params.set("limit", String(limit)); + const payload = await requestJson(baseUrls, `/v1/workers?${params.toString()}`, { + method: "GET", + token, + }); + return getWorkers(payload); + }, + + async getWorkerTokens(workerId: string, orgId: string): Promise { + const payload = await requestJson(baseUrls, `/v1/workers/${encodeURIComponent(workerId)}/tokens`, { + method: "POST", + token, + body: {}, + }); + const tokens = getWorkerTokens(payload); + if (!tokens) { + throw new DenApiError(500, "invalid_worker_token_payload", "Worker token response was missing token values."); + } + return tokens; + }, + + async listOrgSkills(orgId: string): Promise { + const payload = await requestJson(baseUrls, "/v1/skills", { + method: "GET", + token, + }); + return getDenOrgSkillsFromPayload(payload); + }, + + async listOrgSkillHubs(orgId: string): Promise { + const payload = await requestJson(baseUrls, "/v1/skill-hubs", { + method: "GET", + token, + }); + return getDenOrgSkillHubsFromPayload(payload); + }, + + async listOrgSkillHubSummaries(orgId: string): Promise { + const payload = await requestJson(baseUrls, "/v1/skill-hubs", { + method: "GET", + token, + }); + return getOrgSkillHubSummaries(payload); + }, + + async createOrgSkill( + orgId: string, + input: { skillText: string; shared?: "org" | "public" | null }, + ): Promise<{ id: string }> { + const body = { + skillText: input.skillText, + shared: input.shared === undefined ? 
("org" as const) : input.shared, + }; + const payload = await requestJson(baseUrls, "/v1/skills", { + method: "POST", + token, + body, + }); + const id = getCreatedOrgSkillId(payload); + if (!id) { + throw new DenApiError(500, "invalid_skill_payload", "Skill response was missing id."); + } + return { id }; + }, + + async addOrgSkillToHub(orgId: string, skillHubId: string, skillId: string): Promise { + await requestJson( + baseUrls, + `/v1/skill-hubs/${encodeURIComponent(skillHubId)}/skills`, + { + method: "POST", + token, + body: { skillId }, + }, + ); + }, + + async listOrgLlmProviders(orgId: string): Promise { + const payload = await requestJson(baseUrls, "/v1/llm-providers", { + method: "GET", + token, + }); + return getDenOrgLlmProviders(payload); + }, + + async getOrgLlmProviderConnection(orgId: string, llmProviderId: string): Promise { + const payload = await requestJson( + baseUrls, + `/v1/llm-providers/${encodeURIComponent(llmProviderId)}/connect`, + { + method: "GET", + token, + }, + ); + const provider = getDenOrgLlmProviderConnection(payload); + if (!provider) { + throw new DenApiError(500, "invalid_llm_provider_payload", "LLM provider response was missing connection details."); + } + return provider; + }, + + async listOrgMarketplaces(orgId: string): Promise { + const payload = await requestJson( + baseUrls, + `/v1/marketplaces?status=active&limit=100`, + { method: "GET", token }, + ); + return getOrgMarketplaces(payload); + }, + + async getOrgMarketplaceResolved(orgId: string, marketplaceId: string): Promise { + const payload = await requestJson( + baseUrls, + `/v1/marketplaces/${encodeURIComponent(marketplaceId)}/resolved`, + { method: "GET", token }, + ); + const resolved = getOrgMarketplaceResolved(payload); + if (!resolved) { + throw new DenApiError(500, "invalid_marketplace_payload", "Marketplace response was missing plugin details."); + } + return resolved; + }, + + async getOrgPluginResolved(orgId: string, plugin: DenOrgPlugin): Promise { + 
const payload = await requestJson( + baseUrls, + `/v1/plugins/${encodeURIComponent(plugin.id)}/resolved`, + { method: "GET", token }, + ); + return getOrgPluginResolved(plugin, payload); + }, + + async getBillingStatus(options: { includeCheckout?: boolean; includePortal?: boolean; includeInvoices?: boolean } = {}): Promise { + const params = new URLSearchParams(); + if (options.includeCheckout) { + params.set("includeCheckout", "1"); + } + if (options.includePortal === false) { + params.set("excludePortal", "1"); + } + if (options.includeInvoices === false) { + params.set("excludeInvoices", "1"); + } + + const path = params.size > 0 ? `/v1/workers/billing?${params.toString()}` : "/v1/workers/billing"; + const payload = await requestJson(baseUrls, path, { + method: "GET", + token, + }); + const summary = getBillingSummary(payload); + if (!summary) { + throw new DenApiError(500, "invalid_billing_payload", "Billing response was missing details."); + } + return summary; + }, + + async updateSubscriptionCancellation(cancelAtPeriodEnd: boolean): Promise<{ subscription: DenBillingSubscription | null; billing: DenBillingSummary }> { + const payload = await requestJson(baseUrls, "/v1/workers/billing/subscription", { + method: "POST", + token, + body: { cancelAtPeriodEnd }, + }); + const billing = getBillingSummary(payload); + if (!billing) { + throw new DenApiError(500, "invalid_billing_payload", "Subscription update response was missing billing details."); + } + + return { + subscription: isRecord(payload) ? 
getBillingSubscription(payload.subscription) : null, + billing, + }; + }, + }; +} + +export async function fetchDenOrgSkillsCatalog( + client: ReturnType, + orgId: string, +): Promise { + const [hubs, flatSkills] = await Promise.all([client.listOrgSkillHubs(orgId), client.listOrgSkills(orgId)]); + const hubNameBySkillId = new Map(); + for (const hub of hubs) { + for (const skill of hub.skills) { + if (!hubNameBySkillId.has(skill.id)) { + hubNameBySkillId.set(skill.id, hub.name); + } + } + } + const byId = new Map(); + for (const skill of flatSkills) { + byId.set(skill.id, { + ...skill, + hubName: hubNameBySkillId.get(skill.id) ?? null, + }); + } + return [...byId.values()].sort((a, b) => a.title.localeCompare(b.title)); +} diff --git a/apps/app/src/app/lib/desktop-tauri.ts b/apps/app/src/app/lib/desktop-tauri.ts new file mode 100644 index 0000000000..417ec66a1c --- /dev/null +++ b/apps/app/src/app/lib/desktop-tauri.ts @@ -0,0 +1,776 @@ +import { invoke } from "@tauri-apps/api/core"; +import { fetch as tauriFetch } from "@tauri-apps/plugin-http"; +import { validateMcpServerName } from "../mcp"; +import { applyWebviewZoom } from "./font-zoom"; +import { nativeDeepLinkEvent } from "./deep-link-bridge"; + +export const desktopFetch = tauriFetch as unknown as typeof globalThis.fetch; + +export async function openDesktopUrl(url: string): Promise { + const { openUrl } = await import("@tauri-apps/plugin-opener"); + await openUrl(url); +} + +export async function openDesktopPath(target: string): Promise { + const { openPath } = await import("@tauri-apps/plugin-opener"); + await openPath(target); +} + +export async function revealDesktopItemInDir(target: string): Promise { + const { revealItemInDir } = await import("@tauri-apps/plugin-opener"); + await revealItemInDir(target); +} + +export async function relaunchDesktopApp(): Promise { + const { relaunch } = await import("@tauri-apps/plugin-process"); + await relaunch(); +} + +export async function getDesktopHomeDir(): 
Promise { + const { homeDir } = await import("@tauri-apps/api/path"); + return homeDir(); +} + +export async function joinDesktopPath(...parts: string[]): Promise { + const { join } = await import("@tauri-apps/api/path"); + return join(...parts); +} + +export async function setDesktopZoomFactor(value: number): Promise { + try { + const { getCurrentWebview } = await import("@tauri-apps/api/webview"); + const webview = getCurrentWebview(); + await applyWebviewZoom(webview, value); + return true; + } catch { + return false; + } +} + +export async function subscribeDesktopDeepLinks( + handler: (urls: string[]) => void, +): Promise<() => void> { + const [{ getCurrent, onOpenUrl }, { listen }] = await Promise.all([ + import("@tauri-apps/plugin-deep-link"), + import("@tauri-apps/api/event"), + ]); + + const startUrls = await getCurrent().catch(() => null); + if (Array.isArray(startUrls)) { + handler(startUrls); + } + + const deepLinkUnlisten = await onOpenUrl((urls) => { + handler(urls); + }).catch(() => () => undefined); + + const eventUnlisten = await listen(nativeDeepLinkEvent, (event) => { + if (Array.isArray(event.payload)) { + handler(event.payload); + } + }).catch(() => () => undefined); + + return () => { + void deepLinkUnlisten(); + void eventUnlisten(); + }; +} + +export type EngineInfo = { + running: boolean; + runtime: "direct"; + baseUrl: string | null; + projectDir: string | null; + hostname: string | null; + port: number | null; + opencodeUsername: string | null; + opencodePassword: string | null; + opencodeBinPath: string | null; + opencodeBinSource: string | null; + pid: number | null; + lastStdout: string | null; + lastStderr: string | null; +}; + +export type OpenworkServerInfo = { + running: boolean; + remoteAccessEnabled: boolean; + host: string | null; + port: number | null; + baseUrl: string | null; + connectUrl: string | null; + mdnsUrl: string | null; + lanUrl: string | null; + clientToken: string | null; + ownerToken: string | null; + hostToken: 
string | null; + managedOpencodeBinPath: string | null; + managedOpencodeBinSource: string | null; + pid: number | null; + lastStdout: string | null; + lastStderr: string | null; +}; + +export type EngineDoctorResult = { + found: boolean; + inPath: boolean; + resolvedPath: string | null; + resolvedSource: string | null; + version: string | null; + supportsServe: boolean; + notes: string[]; + serveHelpStatus: number | null; + serveHelpStdout: string | null; + serveHelpStderr: string | null; +}; + +export type WorkspaceInfo = { + id: string; + name: string; + path: string; + preset: string; + workspaceType: "local" | "remote"; + remoteType?: "openwork" | "opencode" | null; + baseUrl?: string | null; + directory?: string | null; + displayName?: string | null; + openworkHostUrl?: string | null; + openworkToken?: string | null; + openworkClientToken?: string | null; + openworkHostToken?: string | null; + openworkWorkspaceId?: string | null; + openworkWorkspaceName?: string | null; + + // Sandbox lifecycle metadata (desktop-managed) + sandboxBackend?: "docker" | "microsandbox" | null; + sandboxRunId?: string | null; + sandboxContainerName?: string | null; +}; + +export type WorkspaceList = { + // UI-selected workspace persisted by the desktop shell. + selectedId?: string; + // Runtime/watch target currently followed by the desktop host. + watchedId?: string | null; + // Legacy desktop payloads used activeId for the UI-selected workspace. 
+ activeId?: string | null; + workspaces: WorkspaceInfo[]; +}; + +export function resolveWorkspaceListSelectedId( + list: Pick | null | undefined, +): string { + return list?.selectedId?.trim() || list?.activeId?.trim() || ""; +} + +export type WorkspaceExportSummary = { + outputPath: string; + included: number; + excluded: string[]; +}; + +export async function engineStart( + projectDir: string, + options?: { + preferSidecar?: boolean; + runtime?: "direct"; + workspacePaths?: string[]; + opencodeBinPath?: string | null; + opencodeEnableExa?: boolean; + openworkRemoteAccess?: boolean; + }, +): Promise { + return invoke("engine_start", { + projectDir, + preferSidecar: options?.preferSidecar ?? true, + opencodeBinPath: options?.opencodeBinPath ?? null, + opencodeEnableExa: options?.opencodeEnableExa ?? null, + openworkRemoteAccess: options?.openworkRemoteAccess ?? null, + runtime: options?.runtime ?? null, + workspacePaths: options?.workspacePaths ?? null, + }); +} + +export async function workspaceBootstrap(): Promise { + return invoke("workspace_bootstrap"); +} + +export async function workspaceSetSelected(workspaceId: string): Promise { + return invoke("workspace_set_selected", { workspaceId }); +} + +export async function workspaceSetRuntimeActive(workspaceId: string | null): Promise { + return invoke("workspace_set_runtime_active", { workspaceId: workspaceId ?? 
"" }); +} + +export async function workspaceCreate(input: { + folderPath: string; + name: string; + preset: string; +}): Promise { + return invoke("workspace_create", { + folderPath: input.folderPath, + name: input.name, + preset: input.preset, + }); +} + +export async function workspaceCreateRemote(input: { + baseUrl: string; + directory?: string | null; + displayName?: string | null; + remoteType?: "openwork" | "opencode" | null; + openworkHostUrl?: string | null; + openworkToken?: string | null; + openworkClientToken?: string | null; + openworkHostToken?: string | null; + openworkWorkspaceId?: string | null; + openworkWorkspaceName?: string | null; + + // Sandbox lifecycle metadata (desktop-managed) + sandboxBackend?: "docker" | "microsandbox" | null; + sandboxRunId?: string | null; + sandboxContainerName?: string | null; +}): Promise { + return invoke("workspace_create_remote", { + baseUrl: input.baseUrl, + directory: input.directory ?? null, + displayName: input.displayName ?? null, + remoteType: input.remoteType ?? null, + openworkHostUrl: input.openworkHostUrl ?? null, + openworkToken: input.openworkToken ?? null, + openworkClientToken: input.openworkClientToken ?? null, + openworkHostToken: input.openworkHostToken ?? null, + openworkWorkspaceId: input.openworkWorkspaceId ?? null, + openworkWorkspaceName: input.openworkWorkspaceName ?? null, + sandboxBackend: input.sandboxBackend ?? null, + sandboxRunId: input.sandboxRunId ?? null, + sandboxContainerName: input.sandboxContainerName ?? 
null, + }); +} + +export async function workspaceUpdateRemote(input: { + workspaceId: string; + baseUrl?: string | null; + directory?: string | null; + displayName?: string | null; + remoteType?: "openwork" | "opencode" | null; + openworkHostUrl?: string | null; + openworkToken?: string | null; + openworkClientToken?: string | null; + openworkHostToken?: string | null; + openworkWorkspaceId?: string | null; + openworkWorkspaceName?: string | null; + + // Sandbox lifecycle metadata (desktop-managed) + sandboxBackend?: "docker" | "microsandbox" | null; + sandboxRunId?: string | null; + sandboxContainerName?: string | null; +}): Promise { + return invoke("workspace_update_remote", { + workspaceId: input.workspaceId, + baseUrl: input.baseUrl ?? null, + directory: input.directory ?? null, + displayName: input.displayName ?? null, + remoteType: input.remoteType ?? null, + openworkHostUrl: input.openworkHostUrl ?? null, + openworkToken: input.openworkToken ?? null, + openworkClientToken: input.openworkClientToken ?? null, + openworkHostToken: input.openworkHostToken ?? null, + openworkWorkspaceId: input.openworkWorkspaceId ?? null, + openworkWorkspaceName: input.openworkWorkspaceName ?? null, + sandboxBackend: input.sandboxBackend ?? null, + sandboxRunId: input.sandboxRunId ?? null, + sandboxContainerName: input.sandboxContainerName ?? null, + }); +} + +export async function workspaceUpdateDisplayName(input: { + workspaceId: string; + displayName?: string | null; +}): Promise { + return invoke("workspace_update_display_name", { + workspaceId: input.workspaceId, + displayName: input.displayName ?? 
null, + }); +} + +export async function workspaceForget(workspaceId: string): Promise { + return invoke("workspace_forget", { workspaceId }); +} + +export async function workspaceAddAuthorizedRoot(input: { + workspacePath: string; + folderPath: string; +}): Promise { + return invoke("workspace_add_authorized_root", { + workspacePath: input.workspacePath, + folderPath: input.folderPath, + }); +} + +export async function workspaceExportConfig(input: { + workspaceId: string; + outputPath: string; +}): Promise { + return invoke("workspace_export_config", { + workspaceId: input.workspaceId, + outputPath: input.outputPath, + }); +} + +export async function workspaceImportConfig(input: { + archivePath: string; + targetDir: string; + name?: string | null; +}): Promise { + return invoke("workspace_import_config", { + archivePath: input.archivePath, + targetDir: input.targetDir, + name: input.name ?? null, + }); +} + +export type OpencodeCommandDraft = { + name: string; + description?: string; + template: string; + agent?: string; + model?: string; + subtask?: boolean; +}; + +export type WorkspaceOpenworkConfig = { + version: number; + workspace?: { + name?: string | null; + createdAt?: number | null; + preset?: string | null; + } | null; + authorizedRoots: string[]; + reload?: { + auto?: boolean; + resume?: boolean; + } | null; +}; + +export async function workspaceOpenworkRead(input: { + workspacePath: string; +}): Promise { + return invoke("workspace_openwork_read", { + workspacePath: input.workspacePath, + }); +} + +export async function workspaceOpenworkWrite(input: { + workspacePath: string; + config: WorkspaceOpenworkConfig; +}): Promise { + return invoke("workspace_openwork_write", { + workspacePath: input.workspacePath, + config: input.config, + }); +} + +export async function opencodeCommandList(input: { + scope: "workspace" | "global"; + projectDir: string; +}): Promise { + return invoke("opencode_command_list", { + scope: input.scope, + projectDir: 
input.projectDir, + }); +} + +export async function opencodeCommandWrite(input: { + scope: "workspace" | "global"; + projectDir: string; + command: OpencodeCommandDraft; +}): Promise { + return invoke("opencode_command_write", { + scope: input.scope, + projectDir: input.projectDir, + command: input.command, + }); +} + +export async function opencodeCommandDelete(input: { + scope: "workspace" | "global"; + projectDir: string; + name: string; +}): Promise { + return invoke("opencode_command_delete", { + scope: input.scope, + projectDir: input.projectDir, + name: input.name, + }); +} + +export async function engineStop(): Promise { + return invoke("engine_stop"); +} + +export async function engineRestart(options?: { + opencodeEnableExa?: boolean; + openworkRemoteAccess?: boolean; +}): Promise { + return invoke("engine_restart", { + opencodeEnableExa: options?.opencodeEnableExa ?? null, + openworkRemoteAccess: options?.openworkRemoteAccess ?? null, + }); +} + +export type AppBuildInfo = { + version: string; + gitSha?: string | null; + buildEpoch?: string | null; + openworkDevMode?: boolean; + os?: string | null; + arch?: string | null; +}; + +export type DesktopBootstrapConfig = { + baseUrl: string; + apiBaseUrl?: string | null; + requireSignin: boolean; +}; + +export async function appBuildInfo(): Promise { + return invoke("app_build_info"); +} + +export async function getDesktopBootstrapConfig(): Promise { + return invoke("get_desktop_bootstrap_config"); +} + +export async function setDesktopBootstrapConfig( + config: DesktopBootstrapConfig, +): Promise { + return invoke("set_desktop_bootstrap_config", { config }); +} + +export async function nukeOpenworkAndOpencodeConfigAndExit(): Promise { + return invoke("nuke_openwork_and_opencode_config_and_exit"); +} + +export type OrchestratorDetachedHost = { + openworkUrl: string; + token: string; + ownerToken?: string | null; + hostToken: string; + port: number; + sandboxBackend?: "docker" | "microsandbox" | null; + 
sandboxRunId?: string | null; + sandboxContainerName?: string | null; +}; + +export async function orchestratorStartDetached(input: { + workspacePath: string; + sandboxBackend?: "none" | "docker" | "microsandbox" | null; + sandboxImageRef?: string | null; + runId?: string | null; + openworkToken?: string | null; + openworkHostToken?: string | null; +}): Promise { + return invoke("orchestrator_start_detached", { + workspacePath: input.workspacePath, + sandboxBackend: input.sandboxBackend ?? null, + sandboxImageRef: input.sandboxImageRef ?? null, + runId: input.runId ?? null, + openworkToken: input.openworkToken ?? null, + openworkHostToken: input.openworkHostToken ?? null, + }); +} + +export type SandboxDoctorResult = { + installed: boolean; + daemonRunning: boolean; + permissionOk: boolean; + ready: boolean; + clientVersion?: string | null; + serverVersion?: string | null; + error?: string | null; + debug?: { + candidates: string[]; + selectedBin?: string | null; + versionCommand?: { + status: number; + stdout: string; + stderr: string; + } | null; + infoCommand?: { + status: number; + stdout: string; + stderr: string; + } | null; + } | null; +}; + +export async function sandboxDoctor(): Promise { + return invoke("sandbox_doctor"); +} + +export async function sandboxStop(containerName: string): Promise { + return invoke("sandbox_stop", { containerName }); +} + +export type OpenworkDockerCleanupResult = { + candidates: string[]; + removed: string[]; + errors: string[]; +}; + +export async function sandboxCleanupOpenworkContainers(): Promise { + return invoke("sandbox_cleanup_openwork_containers"); +} + +export type SandboxDebugProbeResult = { + startedAt: number; + finishedAt: number; + runId: string; + workspacePath: string; + ready: boolean; + doctor: SandboxDoctorResult; + detachedHost?: OrchestratorDetachedHost | null; + dockerInspect?: { + status: number; + stdout: string; + stderr: string; + } | null; + dockerLogs?: { + status: number; + stdout: string; + 
stderr: string; + } | null; + cleanup: { + containerName?: string | null; + containerRemoved: boolean; + removeResult?: { + status: number; + stdout: string; + stderr: string; + } | null; + workspaceRemoved: boolean; + errors: string[]; + }; + error?: string | null; +}; + +export async function sandboxDebugProbe(): Promise { + return invoke("sandbox_debug_probe"); +} + +export async function openworkServerInfo(): Promise { + return invoke("openwork_server_info"); +} + +export async function openworkServerRestart(options?: { + remoteAccessEnabled?: boolean; +}): Promise { + return invoke("openwork_server_restart", { + remoteAccessEnabled: options?.remoteAccessEnabled ?? null, + }); +} + +export async function engineInfo(): Promise { + return invoke("engine_info"); +} + +export async function runtimeBootstrap(): Promise { + return { + ok: true, + skipped: true, + reason: "unsupported-runtime", + }; +} + +export async function engineDoctor(options?: { + preferSidecar?: boolean; + opencodeBinPath?: string | null; +}): Promise { + return invoke("engine_doctor", { + preferSidecar: options?.preferSidecar ?? true, + opencodeBinPath: options?.opencodeBinPath ?? 
null, + }); +} + +export async function pickDirectory(options?: { + title?: string; + defaultPath?: string; + multiple?: boolean; +}): Promise { + const { open } = await import("@tauri-apps/plugin-dialog"); + return open({ + title: options?.title, + defaultPath: options?.defaultPath, + directory: true, + canCreateDirectories: true, + multiple: options?.multiple, + }); +} + +export async function pickFile(options?: { + title?: string; + defaultPath?: string; + multiple?: boolean; + filters?: Array<{ name: string; extensions: string[] }>; +}): Promise { + const { open } = await import("@tauri-apps/plugin-dialog"); + return open({ + title: options?.title, + defaultPath: options?.defaultPath, + directory: false, + multiple: options?.multiple, + filters: options?.filters, + }); +} + +export async function saveFile(options?: { + title?: string; + defaultPath?: string; + filters?: Array<{ name: string; extensions: string[] }>; +}): Promise { + const { save } = await import("@tauri-apps/plugin-dialog"); + return save({ + title: options?.title, + defaultPath: options?.defaultPath, + filters: options?.filters, + }); +} + +export type ExecResult = { + ok: boolean; + status: number; + stdout: string; + stderr: string; +}; + +export async function engineInstall(): Promise { + return invoke("engine_install"); +} + +export async function importSkill( + projectDir: string, + sourceDir: string, + options?: { overwrite?: boolean }, +): Promise { + return invoke("import_skill", { + projectDir, + sourceDir, + overwrite: options?.overwrite ?? false, + }); +} + +export async function installSkillTemplate( + projectDir: string, + name: string, + content: string, + options?: { overwrite?: boolean }, +): Promise { + return invoke("install_skill_template", { + projectDir, + name, + content, + overwrite: options?.overwrite ?? 
false, + }); +} + +export type LocalSkillCard = { + name: string; + path: string; + description?: string; + trigger?: string; +}; + +export type LocalSkillContent = { + path: string; + content: string; +}; + +export async function listLocalSkills(projectDir: string): Promise { + return invoke("list_local_skills", { projectDir }); +} + +export async function readLocalSkill(projectDir: string, name: string): Promise { + return invoke("read_local_skill", { projectDir, name }); +} + +export async function writeLocalSkill(projectDir: string, name: string, content: string): Promise { + return invoke("write_local_skill", { projectDir, name, content }); +} + +export async function uninstallSkill(projectDir: string, name: string): Promise { + return invoke("uninstall_skill", { projectDir, name }); +} + +export type OpencodeConfigFile = { + path: string; + exists: boolean; + content: string | null; +}; + +export type UpdaterEnvironment = { + supported: boolean; + reason: string | null; + executablePath: string | null; + appBundlePath: string | null; +}; + +export async function updaterEnvironment(): Promise { + return invoke("updater_environment"); +} + +export async function readOpencodeConfig( + scope: "project" | "global", + projectDir: string, +): Promise { + return invoke("read_opencode_config", { scope, projectDir }); +} + +export async function writeOpencodeConfig( + scope: "project" | "global", + projectDir: string, + content: string, +): Promise { + return invoke("write_opencode_config", { scope, projectDir, content }); +} + +export async function resetOpenworkState(mode: "onboarding" | "all"): Promise { + return invoke("reset_openwork_state", { mode }); +} + +export type CacheResetResult = { + removed: string[]; + missing: string[]; + errors: string[]; +}; + +export async function resetOpencodeCache(): Promise { + return invoke("reset_opencode_cache"); +} + +export async function opencodeMcpAuth( + projectDir: string, + serverName: string, +): Promise { + const 
safeProjectDir = projectDir.trim(); + if (!safeProjectDir) { + throw new Error("project_dir is required"); + } + + const safeServerName = validateMcpServerName(serverName); + + return invoke("opencode_mcp_auth", { + projectDir: safeProjectDir, + serverName: safeServerName, + }); +} + +/** + * Set window decorations (titlebar) visibility. + * When `decorations` is false, the native titlebar is hidden. + * Useful for tiling window managers on Linux (e.g., Hyprland, i3, sway). + */ +export async function setWindowDecorations(decorations: boolean): Promise { + return invoke("set_window_decorations", { decorations }); +} diff --git a/apps/app/src/app/lib/desktop.ts b/apps/app/src/app/lib/desktop.ts new file mode 100644 index 0000000000..dd4a2b9546 --- /dev/null +++ b/apps/app/src/app/lib/desktop.ts @@ -0,0 +1,348 @@ +import * as tauriBridge from "./desktop-tauri"; +import { nativeDeepLinkEvent } from "./deep-link-bridge"; + +export type * from "./desktop-tauri"; + +export type DesktopBridge = typeof tauriBridge; + +declare global { + interface Window { + __OPENWORK_ELECTRON__?: { + bridge?: Partial; + invokeDesktop?: (command: string, ...args: unknown[]) => Promise; + shell?: { + openExternal?: (url: string) => Promise; + relaunch?: () => Promise; + }; + migration?: { + readSnapshot?: () => Promise; + ackSnapshot?: () => Promise<{ ok: boolean; moved: boolean }>; + }; + updater?: { + getChannel?: () => Promise<{ + channel: "stable" | "alpha"; + feedUrl: string; + currentVersion: string; + }>; + setChannel?: (channel: "stable" | "alpha") => Promise<{ + channel: "stable" | "alpha"; + feedUrl: string; + currentVersion: string; + }>; + check?: () => Promise<{ + available: boolean; + currentVersion?: string; + latestVersion?: string | null; + releaseDate?: string | null; + releaseNotes?: unknown; + channel?: "stable" | "alpha"; + feedUrl?: string; + reason?: string; + }>; + download?: () => Promise<{ ok: boolean; reason?: string }>; + installAndRestart?: () => Promise<{ ok: 
boolean; reason?: string }>; + }; + meta?: { + initialDeepLinks?: string[]; + platform?: "darwin" | "linux" | "windows"; + version?: string; + }; + }; + } +} + +function missingElectronMethod(method: string): never { + throw new Error(`Electron desktop bridge method is not implemented yet: ${method}`); +} + +function isElectronDesktopRuntime() { + return typeof window !== "undefined" && window.__OPENWORK_ELECTRON__ != null; +} + +function isTauriDesktopRuntime() { + return typeof window !== "undefined" && (window as any).__TAURI_INTERNALS__ != null; +} + +async function invokeElectronHelper(command: string, ...args: unknown[]): Promise { + const invokeDesktop = window.__OPENWORK_ELECTRON__?.invokeDesktop; + if (!invokeDesktop) { + throw new Error(`Electron desktop helper is unavailable: ${command}`); + } + return (await invokeDesktop(command, ...args)) as T; +} + +function resolveElectronBridge(): DesktopBridge { + const exposed = window.__OPENWORK_ELECTRON__?.bridge ?? {}; + const invokeDesktop = window.__OPENWORK_ELECTRON__?.invokeDesktop; + return new Proxy(exposed as DesktopBridge, { + get(target, prop, receiver) { + const value = Reflect.get(target, prop, receiver); + if (value != null) { + return value; + } + + if (prop === "resolveWorkspaceListSelectedId") { + return tauriBridge.resolveWorkspaceListSelectedId; + } + + if (typeof prop === "string" && invokeDesktop) { + return (...args: unknown[]) => invokeDesktop(prop, ...args); + } + + if (typeof prop === "string") { + return (..._args: unknown[]) => missingElectronMethod(prop); + } + + return value; + }, + }); +} + +function resolveDesktopBridge(): DesktopBridge { + if ( + typeof window !== "undefined" && + (window.__OPENWORK_ELECTRON__?.bridge || window.__OPENWORK_ELECTRON__?.invokeDesktop) + ) { + return resolveElectronBridge(); + } + return tauriBridge; +} + +export const desktopBridge: DesktopBridge = new Proxy({} as DesktopBridge, { + get(_target, prop, receiver) { + return 
Reflect.get(resolveDesktopBridge(), prop, receiver); + }, +}); + +function isLoopbackUrl(input: RequestInfo | URL): boolean { + const raw = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url; + try { + const url = new URL(raw); + return url.hostname === "127.0.0.1" || url.hostname === "localhost" || url.hostname === "[::1]"; + } catch { + return false; + } +} + +export const desktopFetch: typeof globalThis.fetch = (input, init) => { + if (isElectronDesktopRuntime()) { + if (isLoopbackUrl(input)) { + return globalThis.fetch(input, init); + } + + return invokeElectronHelper<{ + status: number; + statusText: string; + headers: [string, string][]; + body: string; + }>("__fetch", typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url, { + method: init?.method, + headers: init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : undefined, + body: typeof init?.body === "string" ? init.body : undefined, + }).then( + (result) => + new Response(result.body, { + status: result.status, + statusText: result.statusText, + headers: result.headers, + }), + ); + } + return tauriBridge.desktopFetch(input, init); +}; + +export async function openDesktopUrl(url: string): Promise { + if (isElectronDesktopRuntime()) { + const openExternal = window.__OPENWORK_ELECTRON__?.shell?.openExternal; + if (openExternal) { + await openExternal(url); + return; + } + } + if (isTauriDesktopRuntime()) { + await tauriBridge.openDesktopUrl(url); + return; + } + if (typeof window !== "undefined") { + window.open(url, "_blank", "noopener,noreferrer"); + } +} + +export async function openDesktopPath(target: string): Promise { + if (isElectronDesktopRuntime()) { + const result = await invokeElectronHelper("__openPath", target); + if (typeof result === "string" && result.trim()) { + throw new Error(result); + } + return; + } + await tauriBridge.openDesktopPath(target); +} + +export async function 
revealDesktopItemInDir(target: string): Promise { + if (isElectronDesktopRuntime()) { + await invokeElectronHelper("__revealItemInDir", target); + return; + } + await tauriBridge.revealDesktopItemInDir(target); +} + +export async function relaunchDesktopApp(): Promise { + if (isElectronDesktopRuntime()) { + await window.__OPENWORK_ELECTRON__?.shell?.relaunch?.(); + return; + } + await tauriBridge.relaunchDesktopApp(); +} + +export async function getDesktopHomeDir(): Promise { + if (isElectronDesktopRuntime()) { + return invokeElectronHelper("__homeDir"); + } + return tauriBridge.getDesktopHomeDir(); +} + +export async function joinDesktopPath(...parts: string[]): Promise { + if (isElectronDesktopRuntime()) { + return invokeElectronHelper("__joinPath", ...parts); + } + return tauriBridge.joinDesktopPath(...parts); +} + +export async function setDesktopZoomFactor(value: number): Promise { + if (isElectronDesktopRuntime()) { + return invokeElectronHelper("__setZoomFactor", value); + } + return tauriBridge.setDesktopZoomFactor(value); +} + +export async function subscribeDesktopDeepLinks( + handler: (urls: string[]) => void, +): Promise<() => void> { + if (isElectronDesktopRuntime()) { + const listener = (event: Event) => { + const customEvent = event as CustomEvent; + if (Array.isArray(customEvent.detail)) { + handler(customEvent.detail); + } + }; + window.addEventListener(nativeDeepLinkEvent, listener as EventListener); + const initialUrls = window.__OPENWORK_ELECTRON__?.meta?.initialDeepLinks; + if (Array.isArray(initialUrls) && initialUrls.length > 0) { + handler(initialUrls); + } + return () => { + window.removeEventListener(nativeDeepLinkEvent, listener as EventListener); + }; + } + + return tauriBridge.subscribeDesktopDeepLinks(handler); +} + +const { + resolveWorkspaceListSelectedId, + engineStart, + workspaceBootstrap, + workspaceSetSelected, + workspaceSetRuntimeActive, + workspaceCreate, + workspaceCreateRemote, + workspaceUpdateRemote, + 
workspaceUpdateDisplayName, + workspaceForget, + workspaceAddAuthorizedRoot, + workspaceExportConfig, + workspaceImportConfig, + workspaceOpenworkRead, + workspaceOpenworkWrite, + opencodeCommandList, + opencodeCommandWrite, + opencodeCommandDelete, + engineStop, + engineRestart, + appBuildInfo, + getDesktopBootstrapConfig, + setDesktopBootstrapConfig, + nukeOpenworkAndOpencodeConfigAndExit, + orchestratorStartDetached, + sandboxDoctor, + sandboxStop, + sandboxCleanupOpenworkContainers, + sandboxDebugProbe, + openworkServerInfo, + openworkServerRestart, + runtimeBootstrap, + engineInfo, + engineDoctor, + pickDirectory, + pickFile, + saveFile, + engineInstall, + importSkill, + installSkillTemplate, + listLocalSkills, + readLocalSkill, + writeLocalSkill, + uninstallSkill, + updaterEnvironment, + readOpencodeConfig, + writeOpencodeConfig, + resetOpenworkState, + resetOpencodeCache, + opencodeMcpAuth, + setWindowDecorations, +} = desktopBridge; + +export { + resolveWorkspaceListSelectedId, + engineStart, + workspaceBootstrap, + workspaceSetSelected, + workspaceSetRuntimeActive, + workspaceCreate, + workspaceCreateRemote, + workspaceUpdateRemote, + workspaceUpdateDisplayName, + workspaceForget, + workspaceAddAuthorizedRoot, + workspaceExportConfig, + workspaceImportConfig, + workspaceOpenworkRead, + workspaceOpenworkWrite, + opencodeCommandList, + opencodeCommandWrite, + opencodeCommandDelete, + engineStop, + engineRestart, + appBuildInfo, + getDesktopBootstrapConfig, + setDesktopBootstrapConfig, + nukeOpenworkAndOpencodeConfigAndExit, + orchestratorStartDetached, + sandboxDoctor, + sandboxStop, + sandboxCleanupOpenworkContainers, + sandboxDebugProbe, + openworkServerInfo, + openworkServerRestart, + runtimeBootstrap, + engineInfo, + engineDoctor, + pickDirectory, + pickFile, + saveFile, + engineInstall, + importSkill, + installSkillTemplate, + listLocalSkills, + readLocalSkill, + writeLocalSkill, + uninstallSkill, + updaterEnvironment, + readOpencodeConfig, + 
writeOpencodeConfig, + resetOpenworkState, + resetOpencodeCache, + opencodeMcpAuth, + setWindowDecorations, +}; diff --git a/apps/app/src/app/lib/dev-log.ts b/apps/app/src/app/lib/dev-log.ts new file mode 100644 index 0000000000..25594a55e2 --- /dev/null +++ b/apps/app/src/app/lib/dev-log.ts @@ -0,0 +1,88 @@ +export type DevLogLevel = "debug" | "warn" | "perf"; + +export type DevLogRecord = { + id: number; + at: string; + ts: number; + level: DevLogLevel; + source: string; + label: string; + payload?: unknown; +}; + +type DevRoot = typeof globalThis & { + __openworkDevLogSeq?: number; + __openworkDevLogs?: DevLogRecord[]; +}; + +const DEV_LOG_LIMIT = 1500; + +const payloadText = (value: unknown) => { + if (value === undefined) return ""; + if (typeof value === "string") return value; + try { + return JSON.stringify(value); + } catch { + return String(value); + } +}; + +export const recordDevLog = ( + enabled: boolean, + input: { + level: DevLogLevel; + source: string; + label: string; + payload?: unknown; + }, +) => { + if (!enabled) return; + + const root = globalThis as DevRoot; + const id = (root.__openworkDevLogSeq ?? 0) + 1; + root.__openworkDevLogSeq = id; + + const entry: DevLogRecord = { + id, + at: new Date().toISOString(), + ts: Date.now(), + level: input.level, + source: input.source, + label: input.label, + payload: input.payload, + }; + + const logs = root.__openworkDevLogs ?? []; + logs.push(entry); + if (logs.length > DEV_LOG_LIMIT) { + logs.splice(0, logs.length - DEV_LOG_LIMIT); + } + root.__openworkDevLogs = logs; +}; + +export const readDevLogs = (limit = 200) => { + const root = globalThis as DevRoot; + const logs = root.__openworkDevLogs ?? 
[]; + if (limit === 0) return logs.slice(); + if (limit < 0) return []; + if (logs.length <= limit) return logs.slice(); + return logs.slice(logs.length - limit); +}; + +export const clearDevLogs = () => { + const root = globalThis as DevRoot; + root.__openworkDevLogs = []; + root.__openworkDevLogSeq = 0; +}; + +export const formatDevLogLine = (entry: DevLogRecord) => { + const prefix = `[${entry.at}] ${entry.level.toUpperCase()} ${entry.source}:${entry.label}`; + const text = payloadText(entry.payload); + return text ? `${prefix} ${text}` : prefix; +}; + +export const formatDevLogText = (limit = 200) => { + const lines = readDevLogs(limit).map(formatDevLogLine); + if (!lines.length) return ""; + return `${lines.join("\n")}\n`; +}; diff --git a/apps/app/src/app/lib/electron-alpha.ts b/apps/app/src/app/lib/electron-alpha.ts new file mode 100644 index 0000000000..5e30fbc6e9 --- /dev/null +++ b/apps/app/src/app/lib/electron-alpha.ts @@ -0,0 +1,80 @@ +import { desktopFetch } from "./desktop"; + +export type ElectronAlphaArtifact = { + arch: "arm64" | "x64"; + manifestUrl: string; + releaseUrl: string; + url: string; + path: string; + version: string; + sha512: string; +}; + +const ELECTRON_ALPHA_RELEASE_BASE_URL = + "https://github.com/different-ai/openwork/releases/download/alpha-macos-latest"; + +export const ELECTRON_ALPHA_RELEASE_PAGE_URL = + "https://github.com/different-ai/openwork/releases/tag/alpha-macos-latest"; + +export const ELECTRON_ALPHA_LATEST_MAC_YML_URL = `${ELECTRON_ALPHA_RELEASE_BASE_URL}/latest-mac.yml`; + +function parseYamlScalar(raw: string, key: string): string | null { + const pattern = new RegExp(`^\\s*${key}:\\s*(.+?)\\s*$`, "m"); + const match = raw.match(pattern); + if (!match?.[1]) return null; + return match[1].trim().replace(/^['"]|['"]$/g, ""); +} + +function parseFirstFileUrl(raw: string): string | null { + const match = raw.match(/^\s*-\s+url:\s*(.+?)\s*$/m); + if (!match?.[1]) return null; + return 
match[1].trim().replace(/^['"]|['"]$/g, ""); +} + +function resolveArtifactUrl(pathOrUrl: string): string { + if (/^https:\/\//i.test(pathOrUrl)) return pathOrUrl; + return new URL(pathOrUrl, `${ELECTRON_ALPHA_RELEASE_BASE_URL}/`).toString(); +} + +export function parseElectronLatestMacYml( + raw: string, + arch: "arm64" | "x64", +): ElectronAlphaArtifact { + const version = parseYamlScalar(raw, "version"); + const path = parseYamlScalar(raw, "path") ?? parseFirstFileUrl(raw); + const sha512 = parseYamlScalar(raw, "sha512"); + + if (!version) { + throw new Error("latest-mac.yml is missing version."); + } + if (!path) { + throw new Error("latest-mac.yml is missing artifact path/url."); + } + if (!sha512) { + throw new Error("latest-mac.yml is missing sha512."); + } + + return { + arch, + manifestUrl: ELECTRON_ALPHA_LATEST_MAC_YML_URL, + releaseUrl: ELECTRON_ALPHA_RELEASE_PAGE_URL, + url: resolveArtifactUrl(path), + path, + version, + sha512, + }; +} + +export async function resolveElectronAlphaArtifact( + arch: "arm64" | "x64" = "arm64", +): Promise { + const response = await desktopFetch(ELECTRON_ALPHA_LATEST_MAC_YML_URL, { + headers: { Accept: "text/yaml, text/plain, */*" }, + }); + if (!response.ok) { + throw new Error( + `Failed to fetch latest-mac.yml (${response.status} ${response.statusText}).`, + ); + } + return parseElectronLatestMacYml(await response.text(), arch); +} diff --git a/apps/app/src/app/lib/feedback.ts b/apps/app/src/app/lib/feedback.ts new file mode 100644 index 0000000000..5eb0c16a91 --- /dev/null +++ b/apps/app/src/app/lib/feedback.ts @@ -0,0 +1,109 @@ +const ENV_FEEDBACK_URL = String(import.meta.env.VITE_OPENWORK_FEEDBACK_URL ?? "").trim(); +const ENV_APP_VERSION = String(import.meta.env.VITE_OPENWORK_APP_VERSION ?? 
"").trim(); + +export const DEFAULT_FEEDBACK_URL = + ENV_FEEDBACK_URL || "https://openworklabs.com/feedback"; + +type FeedbackUrlOptions = { + entrypoint: string; + deployment?: string | null; + appVersion?: string | null; + openworkServerVersion?: string | null; + opencodeVersion?: string | null; + orchestratorVersion?: string | null; +}; + +type ClientOsContext = { + osName?: string; + osVersion?: string; + platform?: string; +}; + +function parseClientOsContext(): ClientOsContext { + if (typeof navigator === "undefined") return {}; + + const platform = + typeof (navigator as Navigator & { userAgentData?: { platform?: string } }).userAgentData + ?.platform === "string" + ? (navigator as Navigator & { userAgentData?: { platform?: string } }).userAgentData?.platform?.trim() ?? "" + : typeof navigator.platform === "string" + ? navigator.platform.trim() + : ""; + const userAgent = + typeof navigator.userAgent === "string" ? navigator.userAgent : ""; + + const macMatch = userAgent.match(/Mac OS X ([0-9_]+)/i); + if (macMatch) { + return { + osName: "macOS", + osVersion: macMatch[1]?.replace(/_/g, "."), + platform, + }; + } + + const windowsMatch = userAgent.match(/Windows NT ([0-9.]+)/i); + if (windowsMatch) { + const rawVersion = windowsMatch[1] ?? ""; + const mappedVersion = + rawVersion === "10.0" ? "10/11" : rawVersion || undefined; + return { + osName: "Windows", + osVersion: mappedVersion, + platform, + }; + } + + const iosMatch = userAgent.match(/(?:iPhone|iPad|iPod).*OS ([0-9_]+)/i); + if (iosMatch) { + return { + osName: "iOS", + osVersion: iosMatch[1]?.replace(/_/g, "."), + platform, + }; + } + + const androidMatch = userAgent.match(/Android ([0-9.]+)/i); + if (androidMatch) { + return { + osName: "Android", + osVersion: androidMatch[1], + platform, + }; + } + + if (/Linux/i.test(userAgent) || /Linux/i.test(platform)) { + return { + osName: "Linux", + platform, + }; + } + + return platform ? 
{ platform } : {}; +} + +export function buildFeedbackUrl(options: FeedbackUrlOptions): string { + const url = new URL(DEFAULT_FEEDBACK_URL); + const osContext = parseClientOsContext(); + + url.searchParams.set("source", "openwork-app"); + url.searchParams.set("entrypoint", options.entrypoint); + + const entries = { + deployment: options.deployment?.trim() ?? "", + appVersion: options.appVersion?.trim() || ENV_APP_VERSION, + openworkServerVersion: options.openworkServerVersion?.trim() ?? "", + opencodeVersion: options.opencodeVersion?.trim() ?? "", + orchestratorVersion: options.orchestratorVersion?.trim() ?? "", + osName: osContext.osName?.trim() ?? "", + osVersion: osContext.osVersion?.trim() ?? "", + platform: osContext.platform?.trim() ?? "", + }; + + for (const [key, value] of Object.entries(entries)) { + if (value) { + url.searchParams.set(key, value); + } + } + + return url.toString(); +} diff --git a/apps/app/src/app/lib/font-zoom.ts b/apps/app/src/app/lib/font-zoom.ts new file mode 100644 index 0000000000..cf33f68ca2 --- /dev/null +++ b/apps/app/src/app/lib/font-zoom.ts @@ -0,0 +1,82 @@ +export const FONT_ZOOM_STORAGE_KEY = "openwork.desktop-font-zoom.v1"; +export const FONT_ZOOM_BASE_PX = 16; +export const FONT_ZOOM_STEP = 0.1; +export const FONT_ZOOM_MIN = 0.8; +export const FONT_ZOOM_MAX = 1.6; + +export type FontZoomShortcutAction = "in" | "out" | "reset"; +export type FontZoomTarget = { setZoom: (scaleFactor: number) => Promise }; + +export function clampFontZoom(value: number): number { + return Math.min(FONT_ZOOM_MAX, Math.max(FONT_ZOOM_MIN, value)); +} + +export function normalizeFontZoom(value: number): number { + return Math.round(clampFontZoom(value) * 100) / 100; +} + +export function parseFontZoomShortcut(event: { + key: string; + code: string; + metaKey: boolean; + ctrlKey: boolean; + altKey: boolean; +}): FontZoomShortcutAction | null { + const mod = event.metaKey || event.ctrlKey; + if (!mod || event.altKey) return null; + + if ( + 
// Matches a "file:" scheme prefix with an optional "//" authority marker.
const FILE_URI_PREFIX_RE = /^file:(?:\/\/)?/i;
// A URL pathname like "/C:/..." — a Windows drive path with a spurious leading slash.
const WINDOWS_DRIVE_URI_PATH_RE = /^\/[A-Za-z]:\//;

// decodeURIComponent that returns its input untouched on malformed escapes.
const safeDecodeURIComponent = (value) => {
  try {
    return decodeURIComponent(value);
  } catch {
    return value;
  }
};

/**
 * Convert a file:// URI into a plain filesystem path.
 * Non-file strings are returned trimmed but otherwise unchanged.
 * Remote hosts become UNC-style "//host/path"; "localhost" is treated as local.
 */
export const normalizeLocalFilePath = (value) => {
  const input = String(value ?? "").trim();
  if (!FILE_URI_PREFIX_RE.test(input)) return input;

  try {
    const url = new URL(input);
    if (url.protocol !== "file:") return input;

    const decodedPath = safeDecodeURIComponent(url.pathname || "");
    if (!decodedPath) return input;
    // "/C:/..." → "C:/..." (drop the URI artifact slash).
    if (WINDOWS_DRIVE_URI_PATH_RE.test(decodedPath)) return decodedPath.slice(1);
    if (url.hostname && url.hostname.toLowerCase() !== "localhost") {
      return `//${url.hostname}${decodedPath}`;
    }
    return decodedPath;
  } catch {
    // Not URL-parseable: strip the prefix manually and decode best-effort.
    const stripped = safeDecodeURIComponent(input.replace(FILE_URI_PREFIX_RE, ""));
    if (!stripped) return input;
    return WINDOWS_DRIVE_URI_PATH_RE.test(stripped) ? stripped.slice(1) : stripped;
  }
};
+// +// Scope decision: we migrate *workspace* keys only. Everything else +// (theme, font zoom, sidebar widths, feature flags) is cheap to redo and +// not worth the complexity of a cross-origin localStorage transfer. + +import { invoke } from "@tauri-apps/api/core"; + +export const MIGRATION_SNAPSHOT_VERSION = 1; + +// Keep this list tiny and strict. Adding keys here expands blast radius +// if a later release renames them. +export const MIGRATION_KEY_PATTERNS: Array = [ + /^openwork\.react\.activeWorkspace$/, + /^openwork\.react\.sessionByWorkspace$/, + /^openwork\.server\.list$/, + /^openwork\.server\.active$/, + /^openwork\.server\.urlOverride$/, + /^openwork\.server\.token$/, +]; + +export type MigrationSnapshot = { + version: typeof MIGRATION_SNAPSHOT_VERSION; + writtenAt: number; + source: "tauri"; + keys: Record; +}; + +function matchesMigrationKey(key: string) { + return MIGRATION_KEY_PATTERNS.some((pattern) => pattern.test(key)); +} + +function collectMigrationKeysFromLocalStorage(): Record { + const out: Record = {}; + if (typeof window === "undefined") return out; + for (let i = 0; i < window.localStorage.length; i++) { + const key = window.localStorage.key(i); + if (!key || !matchesMigrationKey(key)) continue; + const value = window.localStorage.getItem(key); + if (value != null) out[key] = value; + } + return out; +} + +/** + * Tauri-only. Called by the last Tauri release right before it kicks off + * the Electron installer. Snapshots the workspace-related localStorage + * keys to /migration-snapshot.v1.json via a Rust command + * that does the actual disk write (renderer can't write outside the + * sandbox on its own). 
+ */ +export async function writeMigrationSnapshotFromTauri(): Promise<{ + ok: boolean; + keyCount: number; + reason?: string; +}> { + try { + const keys = collectMigrationKeysFromLocalStorage(); + const snapshot: MigrationSnapshot = { + version: MIGRATION_SNAPSHOT_VERSION, + writtenAt: Date.now(), + source: "tauri", + keys, + }; + await invoke("write_migration_snapshot", { snapshot }); + return { ok: true, keyCount: Object.keys(keys).length }; + } catch (error) { + return { + ok: false, + keyCount: 0, + reason: error instanceof Error ? error.message : String(error), + }; + } +} + +type ElectronMigrationBridge = { + readSnapshot: () => Promise; + ackSnapshot: () => Promise<{ ok: boolean; moved: boolean }>; +}; + +function electronMigrationBridge(): ElectronMigrationBridge | null { + if (typeof window === "undefined") return null; + const bridge = (window as unknown as { + __OPENWORK_ELECTRON__?: { migration?: ElectronMigrationBridge }; + }).__OPENWORK_ELECTRON__; + return bridge?.migration ?? null; +} + +/** + * Electron-only. Called once during app boot. Reads the migration + * snapshot (if any), hydrates localStorage for keys that aren't already + * set on the Electron install, and acks the file so we don't re-ingest + * on subsequent launches. + * + * Returns the number of keys hydrated. Returns 0 when there is no + * snapshot, which is the steady-state case after the first launch. + */ +export async function ingestMigrationSnapshotOnElectronBoot(): Promise { + const bridge = electronMigrationBridge(); + if (!bridge) return 0; + + let snapshot: MigrationSnapshot | null = null; + try { + snapshot = await bridge.readSnapshot(); + } catch { + return 0; + } + if (!snapshot || snapshot.version !== MIGRATION_SNAPSHOT_VERSION) return 0; + + const entries = Object.entries(snapshot.keys ?? 
{}); + let hydrated = 0; + if (typeof window !== "undefined") { + for (const [key, value] of entries) { + if (!matchesMigrationKey(key)) continue; + if (window.localStorage.getItem(key) != null) continue; + try { + window.localStorage.setItem(key, value); + hydrated += 1; + } catch { + // localStorage write failures are non-fatal; the user just won't + // see that key migrated this launch. + } + } + } + + try { + await bridge.ackSnapshot(); + } catch { + // A failed ack means we'll re-ingest on next launch, but the + // "skip if already set" guard keeps that idempotent. + } + + return hydrated; +} + +export type MigrateToElectronRequest = { + /** + * Download URL for the matching Electron artifact. On macOS a .zip. + * On Windows, an NSIS .exe (TODO — stubbed today). On Linux, an AppImage + * (TODO — stubbed today). + */ + url: string; + /** Optional sha256 to verify before touching the filesystem. */ + sha256?: string; + /** Optional electron-builder sha512 (base64) from latest-mac.yml. */ + sha512?: string; + /** + * Override where the Electron .app ends up (macOS). Defaults to + * replacing the currently-running .app bundle in place. + */ + targetAppPath?: string; +}; + +/** + * Tauri-only. Hand off to the new Electron build: + * 1. Download + verify the installer + * 2. Replace the running .app bundle + * 3. Relaunch into the Electron binary + * 4. Quit this Tauri process + * + * Callers should invoke `writeMigrationSnapshotFromTauri()` first so the + * new Electron shell can hydrate localStorage on first launch. + */ +export async function migrateToElectron( + request: MigrateToElectronRequest, +): Promise<{ ok: boolean; reason?: string }> { + try { + await invoke("migrate_to_electron", { request }); + return { ok: true }; + } catch (error) { + return { + ok: false, + reason: error instanceof Error ? error.message : String(error), + }; + } +} + +// Localstorage key that stores a "don't ask again until" epoch-ms. 
+// Users who click "Later" get a 24h reprieve; after that we nudge again. +export const MIGRATION_DEFER_KEY = "openwork.migration.deferredUntil"; +export const MIGRATION_DEFAULT_DEFER_MS = 24 * 60 * 60 * 1000; + +export function isMigrationDeferred(now: number = Date.now()): boolean { + if (typeof window === "undefined") return false; + try { + const raw = window.localStorage.getItem(MIGRATION_DEFER_KEY); + if (!raw) return false; + const until = Number.parseInt(raw, 10); + return Number.isFinite(until) && until > now; + } catch { + return false; + } +} + +export function deferMigration(ms: number = MIGRATION_DEFAULT_DEFER_MS): void { + if (typeof window === "undefined") return; + try { + window.localStorage.setItem(MIGRATION_DEFER_KEY, String(Date.now() + ms)); + } catch { + // non-fatal + } +} + +export function clearMigrationDefer(): void { + if (typeof window === "undefined") return; + try { + window.localStorage.removeItem(MIGRATION_DEFER_KEY); + } catch { + // non-fatal + } +} diff --git a/apps/app/src/app/lib/model-behavior.ts b/apps/app/src/app/lib/model-behavior.ts new file mode 100644 index 0000000000..a8cccd76eb --- /dev/null +++ b/apps/app/src/app/lib/model-behavior.ts @@ -0,0 +1,217 @@ +import type { ProviderListItem } from "../types"; +import type { ModelBehaviorOption } from "../types"; +import { t } from "../../i18n"; + +type ProviderModel = ProviderListItem["models"][string]; + +const WELL_KNOWN_VARIANT_ORDER = [ + "none", + "minimal", + "low", + "medium", + "high", + "xhigh", + "max", +] as const; + +function defaultBehaviorOption(): ModelBehaviorOption { + return { + value: null, + label: t("settings.provider_default_label"), + description: t("settings.provider_default_desc"), + }; +} + +const humanize = (value: string) => { + const cleaned = value.replace(/[_-]+/g, " ").replace(/\s+/g, " ").trim(); + if (!cleaned) return value; + return cleaned + .split(" ") + .filter(Boolean) + .map((word) => { + if (/\d/.test(word) || word.length <= 3) return 
word.toUpperCase(); + const lower = word.toLowerCase(); + return lower.charAt(0).toUpperCase() + lower.slice(1); + }) + .join(" "); +}; + +export const normalizeModelBehaviorValue = (value: string | null) => { + if (!value) return null; + const normalized = value.trim().toLowerCase(); + if (!normalized) return null; + if ( + normalized === "balance" || + normalized === "balanced" || + normalized === "default" || + normalized === "provider-default" + ) { + return null; + } + return normalized; +}; + +const getVariantKeys = (model: ProviderModel) => { + const keys = Object.keys(model.variants ?? {}) + .map((key) => normalizeModelBehaviorValue(key)) + .filter((key): key is string => Boolean(key)); + return Array.from(new Set(keys)); +}; + +const sortVariantKeys = (keys: string[]) => + keys.slice().sort((a, b) => { + const aIndex = WELL_KNOWN_VARIANT_ORDER.indexOf(a as (typeof WELL_KNOWN_VARIANT_ORDER)[number]); + const bIndex = WELL_KNOWN_VARIANT_ORDER.indexOf(b as (typeof WELL_KNOWN_VARIANT_ORDER)[number]); + if (aIndex !== -1 || bIndex !== -1) { + if (aIndex === -1) return 1; + if (bIndex === -1) return -1; + return aIndex - bIndex; + } + return a.localeCompare(b); + }); + +const providerFamily = (providerID: string, providerName?: string | null) => { + const normalizedId = providerID.trim().toLowerCase(); + if (["anthropic", "openai", "google", "opencode"].includes(normalizedId)) { + return normalizedId; + } + + const normalizedName = providerName?.trim().toLowerCase() ?? 
""; + if (normalizedName.includes("anthropic")) return "anthropic"; + if (normalizedName.includes("openai")) return "openai"; + if (normalizedName.includes("google")) return "google"; + if (normalizedName.includes("opencode")) return "opencode"; + return normalizedId; +}; + +const getBehaviorTitle = ( + providerID: string, + model: ProviderModel, + variantKeys: string[], + providerName?: string | null, +) => { + const family = providerFamily(providerID, providerName); + if (variantKeys.length > 0) { + if (family === "anthropic") return t("model_behavior.title_extended_thinking"); + if (family === "google") return t("model_behavior.title_reasoning_budget"); + if ( + family === "openai" || + family === "opencode" || + variantKeys.some((key) => ["none", "minimal", "low", "medium", "high", "xhigh"].includes(key)) + ) { + return t("model_behavior.title_reasoning_effort"); + } + return t("app.model_behavior_title"); + } + if (model.capabilities?.reasoning) return t("model_behavior.title_builtin_reasoning"); + return t("model_behavior.title_standard_generation"); +}; + +const getVariantLabel = (providerID: string, key: string, providerName?: string | null) => { + const family = providerFamily(providerID, providerName); + if (key === "none") return t("model_behavior.label_fast"); + if (key === "minimal") return t("model_behavior.label_quick"); + if (key === "low") return t("model_behavior.label_light"); + if (key === "medium") return t("model_behavior.label_balanced"); + if (key === "high") return family === "anthropic" ? 
t("model_behavior.label_extended") : t("model_behavior.label_deep"); + if (key === "xhigh" || key === "max") return t("model_behavior.label_maximum"); + return humanize(key); +}; + +export const formatGenericBehaviorLabel = (value: string | null) => { + const normalized = normalizeModelBehaviorValue(value); + if (!normalized) return defaultBehaviorOption().label; + return getVariantLabel("generic", normalized); +}; + +const getVariantDescription = ( + providerID: string, + key: string, + label: string, + providerName?: string | null, +) => { + const family = providerFamily(providerID, providerName); + if (key === "none") return t("model_behavior.desc_none"); + if (key === "minimal") return t("model_behavior.desc_minimal"); + if (key === "low") return family === "google" + ? t("model_behavior.desc_low_google") + : t("model_behavior.desc_low"); + if (key === "medium") return t("model_behavior.desc_medium"); + if (key === "high") return family === "anthropic" + ? t("model_behavior.desc_high_anthropic") + : t("model_behavior.desc_high"); + if (key === "xhigh" || key === "max") return family === "anthropic" + ? 
t("model_behavior.desc_max_anthropic") + : t("model_behavior.desc_max"); + return t("model_behavior.desc_generic", { label: label.toLowerCase() }); +}; + +export const getModelBehaviorOptions = ( + providerID: string, + model: ProviderModel, + providerName?: string | null, +): ModelBehaviorOption[] => { + const variantKeys = sortVariantKeys(getVariantKeys(model)); + if (!variantKeys.length) return []; + return [ + defaultBehaviorOption(), + ...variantKeys.map((key) => { + const label = getVariantLabel(providerID, key, providerName); + return { + value: key, + label, + description: getVariantDescription(providerID, key, label, providerName), + }; + }), + ]; +}; + +export const sanitizeModelBehaviorValue = ( + providerID: string, + model: ProviderModel, + value: string | null, + providerName?: string | null, +) => { + const normalized = normalizeModelBehaviorValue(value); + if (!normalized) return null; + return getModelBehaviorOptions(providerID, model, providerName).some((option) => option.value === normalized) + ? normalized + : null; +}; + +export const getModelBehaviorSummary = ( + providerID: string, + model: ProviderModel, + value: string | null, + providerName?: string | null, +) => { + const options = getModelBehaviorOptions(providerID, model, providerName); + const sanitized = sanitizeModelBehaviorValue(providerID, model, value, providerName); + const selected = options.find((option) => option.value === sanitized) ?? options[0] ?? null; + const title = getBehaviorTitle(providerID, model, getVariantKeys(model), providerName); + + if (options.length > 0) { + return { + title, + label: selected?.label ?? defaultBehaviorOption().label, + description: selected?.description ?? 
defaultBehaviorOption().description, + options, + }; + } + + if (model.capabilities?.reasoning) { + return { + title, + label: t("model_behavior.label_builtin"), + description: t("model_behavior.desc_builtin"), + options, + }; + } + + return { + title, + label: t("model_behavior.label_standard"), + description: t("model_behavior.desc_standard"), + options, + }; +}; diff --git a/apps/app/src/app/lib/opencode-session.ts b/apps/app/src/app/lib/opencode-session.ts new file mode 100644 index 0000000000..713638e8fe --- /dev/null +++ b/apps/app/src/app/lib/opencode-session.ts @@ -0,0 +1,161 @@ +/** + * Typed helpers for OpenCode session operations. + * + * The OpenCode SDK (v2) exposes `session.abort`, `session.revert`, + * `session.unrevert`, `session.shell`, and `command.list` as typed methods. + * This module provides thin wrappers that avoid `as any` casts by using the + * SDK types directly, and adds feature-detection for newer API surface + * (e.g. `shellAsync`) that may not be present in older SDK versions. + */ +import type { Session } from "@opencode-ai/sdk/v2/client"; +import type { Client, ModelRef } from "../types"; +import { unwrap } from "./opencode"; + +// --------------------------------------------------------------------------- +// Session helpers +// --------------------------------------------------------------------------- + +/** + * Abort an active session. Silently succeeds if the session is already idle. + */ +export async function abortSession(client: Client, sessionID: string): Promise { + unwrap(await client.session.abort({ sessionID })); +} + +/** + * Abort an active session, swallowing errors (useful before revert/undo). + */ +export async function abortSessionSafe(client: Client, sessionID: string): Promise { + try { + await client.session.abort({ sessionID }); + } catch { + // intentional: abort may fail if session is already idle + } +} + +/** + * Revert a session to a specific message boundary. 
+ */ +export async function revertSession( + client: Client, + sessionID: string, + messageID: string, +): Promise { + return unwrap(await client.session.revert({ sessionID, messageID })) as Session; +} + +/** + * Restore all previously reverted messages in a session. + */ +export async function unrevertSession( + client: Client, + sessionID: string, +): Promise { + return unwrap(await client.session.unrevert({ sessionID })) as Session; +} + +/** + * Compact/summarize a long session to reduce context size. + * Uses `session.summarize` when available and falls back to `/compact` command. + */ +export async function compactSession( + client: Client, + sessionID: string, + model: ModelRef, + options?: { directory?: string }, +): Promise { + const session = client.session as { summarize?: (input: { + sessionID: string; + directory?: string; + providerID: string; + modelID: string; + }) => Promise }; + + if (typeof session.summarize === "function") { + const result = await session.summarize({ + sessionID, + directory: options?.directory, + providerID: model.providerID, + modelID: model.modelID, + }); + assertNoClientError(result); + return; + } + + const modelString = `${model.providerID}/${model.modelID}`; + const result = await client.session.command({ + sessionID, + command: "compact", + arguments: "", + model: modelString, + directory: options?.directory, + }); + assertNoClientError(result); +} + +// --------------------------------------------------------------------------- +// Shell execution +// --------------------------------------------------------------------------- + +/** + * Execute a shell command in a session. Uses `shell` from the SDK. + * Falls back to `promptAsync` with a `!` prefix if `shell` is unavailable. 
+ */ +export async function shellInSession( + client: Client, + sessionID: string, + command: string, + options?: { model?: { providerID: string; modelID: string }; agent?: string; variant?: string }, +): Promise { + const result = await client.session.shell({ sessionID, command }); + assertNoClientError(result); +} + +// --------------------------------------------------------------------------- +// Command listing +// --------------------------------------------------------------------------- + +export type CommandListItem = { + id: string; + name: string; + description?: string; + source?: "command" | "mcp" | "skill"; +}; + +/** + * List available slash commands for a workspace. + */ +export async function listCommands( + client: Client, + directory?: string, +): Promise { + try { + const result = await client.command.list({ directory }); + const list = result?.data ?? []; + if (!Array.isArray(list)) return []; + return list.map((cmd: Record) => ({ + id: `cmd:${cmd.name}`, + name: String(cmd.name ?? ""), + description: cmd.description ? String(cmd.description) : undefined, + source: cmd.source as CommandListItem["source"], + })); + } catch { + return []; + } +} + +// --------------------------------------------------------------------------- +// Internal +// --------------------------------------------------------------------------- + +function assertNoClientError(result: unknown): void { + const maybe = result as { error?: unknown } | null | undefined; + if (!maybe || maybe.error === undefined) return; + const message = + maybe.error instanceof Error + ? maybe.error.message + : typeof maybe.error === "string" + ? 
maybe.error + : JSON.stringify(maybe.error); + throw new Error(message || "Unknown error"); +} diff --git a/apps/app/src/app/lib/opencode.ts b/apps/app/src/app/lib/opencode.ts new file mode 100644 index 0000000000..48ea7a531c --- /dev/null +++ b/apps/app/src/app/lib/opencode.ts @@ -0,0 +1,507 @@ +import { createOpencodeClient, type Message, type Part, type Session, type Todo } from "@opencode-ai/sdk/v2/client"; + +import { desktopFetch } from "./desktop"; +import { createOpenworkServerClient, OpenworkServerError } from "./openwork-server"; +import { isDesktopRuntime } from "../utils"; + +type FieldsResult = + | ({ data: T; error?: undefined } & { request: Request; response: Response }) + | ({ data?: undefined; error: unknown } & { request: Request; response: Response }); + +type PromptAsyncParameters = { + sessionID: string; + directory?: string; + messageID?: string; + model?: { providerID: string; modelID: string }; + agent?: string; + noReply?: boolean; + tools?: { [key: string]: boolean }; + system?: string; + variant?: string; + parts?: unknown[]; + reasoning_effort?: string; +}; + +type CommandParameters = { + sessionID: string; + directory?: string; + messageID?: string; + agent?: string; + model?: string; + arguments?: string; + command?: string; + variant?: string; + parts?: unknown[]; + reasoning_effort?: string; +}; + +type SessionListParameters = { + directory?: string; + roots?: boolean; + start?: number; + search?: string; + limit?: number; +}; + +type SessionLookupParameters = { + sessionID: string; + directory?: string; +}; + +type SessionMessagesParameters = { + sessionID: string; + directory?: string; + limit?: number; +}; + +export type OpencodeAuth = { + username?: string; + password?: string; + token?: string; + mode?: "basic" | "openwork"; +}; + +const DEFAULT_OPENCODE_REQUEST_TIMEOUT_MS = 10_000; +const OAUTH_OPENCODE_REQUEST_TIMEOUT_MS = 5 * 60_000; +const MCP_AUTH_OPENCODE_REQUEST_TIMEOUT_MS = 90_000; +const SESSION_COMMAND_URL_RE = 
/\/session\/[^/?#]+\/command(?:[?#]|$)/; + +function getRequestUrl(input: RequestInfo | URL): string { + if (typeof input === "string") return input; + if (input instanceof URL) return input.toString(); + if (typeof Request !== "undefined" && input instanceof Request) return input.url; + return String(input); +} + +function resolveRequestTimeoutMs(input: RequestInfo | URL, fallbackMs: number): number { + const url = getRequestUrl(input); + if (SESSION_COMMAND_URL_RE.test(url)) { + return 0; + } + if (/\/provider\/oauth\//.test(url) || /\/mcp\/auth\/callback\b/.test(url)) { + return Math.max(fallbackMs, OAUTH_OPENCODE_REQUEST_TIMEOUT_MS); + } + if (/\/mcp\/.*auth\b/.test(url)) { + return Math.max(fallbackMs, MCP_AUTH_OPENCODE_REQUEST_TIMEOUT_MS); + } + return fallbackMs; +} + + +function buildDirectoryHeader(directory?: string) { + if (!directory?.trim()) return undefined; + const trimmed = directory.trim(); + return /[^\x00-\x7F]/.test(trimmed) ? encodeURIComponent(trimmed) : trimmed; +} + +async function postSessionRequest( + fetchImpl: typeof globalThis.fetch, + baseUrl: string, + path: string, + body: Record, + options?: { headers?: Record; directory?: string; throwOnError?: boolean }, +): Promise> { + const headers = new Headers(options?.headers); + headers.set("Content-Type", "application/json"); + const directoryHeader = buildDirectoryHeader(options?.directory); + if (directoryHeader) { + headers.set("x-opencode-directory", directoryHeader); + } + + const response = await fetchImpl(`${baseUrl}${path}`, { + method: "POST", + headers, + body: JSON.stringify(body), + }); + + const request = new Request(`${baseUrl}${path}`, { + method: "POST", + headers, + body: JSON.stringify(body), + }); + + if (response.ok) { + const data = response.status === 204 ? ({} as T) : ((await response.json()) as T); + return { data, request, response }; + } + + const text = await response.text(); + let error: unknown = text; + try { + error = text ? 
JSON.parse(text) : text; + } catch { + // ignore + } + if (options?.throwOnError) throw error; + return { error, request, response }; +} + +function resolveOpenworkWorkspaceMount(baseUrl: string): { baseUrl: string; workspaceId: string } | null { + try { + const url = new URL(baseUrl); + const match = url.pathname.replace(/\/+$/, "").match(/^(.*\/w\/([^/]+))\/opencode$/); + if (!match?.[1] || !match[2]) return null; + url.pathname = match[1]; + url.search = ""; + return { + baseUrl: url.toString().replace(/\/+$/, ""), + workspaceId: decodeURIComponent(match[2]), + }; + } catch { + return null; + } +} + +function createSyntheticResult( + url: string, + method: string, + input: + | { ok: true; data: T; status?: number } + | { ok: false; error: unknown; status?: number }, +): FieldsResult { + const request = new Request(url, { method }); + const response = new Response(input.ok ? JSON.stringify(input.data) : null, { + status: input.status ?? (input.ok ? 200 : 500), + headers: { "Content-Type": "application/json" }, + }); + if (input.ok) { + return { data: input.data, request, response }; + } + return { error: input.error, request, response }; +} + +async function wrapOpenworkRead( + url: string, + read: () => Promise, + options?: { throwOnError?: boolean }, +): Promise> { + try { + return createSyntheticResult(url, "GET", { ok: true, data: await read() }); + } catch (error) { + if (options?.throwOnError) throw error; + return createSyntheticResult(url, "GET", { + ok: false, + error, + status: error instanceof OpenworkServerError ? 
error.status : 500, + }); + } +} + +function shouldFallbackToLegacySessionRead(error: unknown): boolean { + if (!(error instanceof OpenworkServerError)) return false; + return error.status === 404 || error.status === 405 || error.status === 501; +} + +async function wrapOpenworkReadWithFallback( + url: string, + read: () => Promise, + fallback: () => Promise>, + options?: { throwOnError?: boolean }, +): Promise> { + try { + return createSyntheticResult(url, "GET", { ok: true, data: await read() }); + } catch (error) { + if (!shouldFallbackToLegacySessionRead(error)) { + if (options?.throwOnError) throw error; + return createSyntheticResult(url, "GET", { + ok: false, + error, + status: error instanceof OpenworkServerError ? error.status : 500, + }); + } + return fallback(); + } +} + +async function fetchWithTimeout( + fetchImpl: typeof globalThis.fetch, + input: RequestInfo | URL, + init: RequestInit | undefined, + timeoutMs: number, +) { + const effectiveTimeoutMs = resolveRequestTimeoutMs(input, timeoutMs); + if (!Number.isFinite(effectiveTimeoutMs) || effectiveTimeoutMs <= 0) { + return fetchImpl(input, init); + } + + const controller = typeof AbortController !== "undefined" ? new AbortController() : null; + const signal = controller?.signal; + const initWithSignal = signal && !init?.signal ? { ...(init ?? {}), signal } : init; + + let timeoutId: ReturnType | null = null; + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + try { + controller?.abort(); + } catch { + // ignore + } + reject(new Error("Request timed out.")); + }, effectiveTimeoutMs); + }); + + try { + return await Promise.race([fetchImpl(input, initWithSignal), timeoutPromise]); + } catch (error) { + const name = (error && typeof error === "object" && "name" in error ? 
(error as any).name : "") as string; + if (name === "AbortError") { + throw new Error("Request timed out."); + } + throw error; + } finally { + if (timeoutId) clearTimeout(timeoutId); + } +} + +const encodeBasicAuth = (auth?: OpencodeAuth) => { + if (!auth?.username || !auth?.password) return null; + const token = `${auth.username}:${auth.password}`; + if (typeof btoa === "function") return btoa(token); + const buffer = (globalThis as { Buffer?: { from: (input: string, encoding: string) => { toString: (encoding: string) => string } } }) + .Buffer; + return buffer ? buffer.from(token, "utf8").toString("base64") : null; +}; + +const resolveAuthHeader = (auth?: OpencodeAuth) => { + if (auth?.mode === "openwork" && auth.token) { + return `Bearer ${auth.token}`; + } + const encoded = encodeBasicAuth(auth); + return encoded ? `Basic ${encoded}` : null; +}; + +/** + * URLs whose response body we must stream chunk-by-chunk (SSE, long-running + * message streams, event subscriptions). The Tauri HTTP plugin's + * `fetch_read_body` IPC call blocks until the entire body is delivered, so + * pointing it at an infinite stream freezes the webview's main thread for + * minutes. For these endpoints we always use the webview's native fetch — + * CORS is already wide open on the openwork/opencode stack, so there's no + * reason to route them through the plugin. + */ +const STREAM_URL_RE = /\/(event|stream)(\b|\/|$|\?)/; + +function requestIsStreaming(input: RequestInfo | URL, init?: RequestInit): boolean { + const url = getRequestUrl(input); + if (STREAM_URL_RE.test(url)) return true; + const accept = + input instanceof Request + ? input.headers.get("accept") ?? input.headers.get("Accept") + : new Headers(init?.headers).get("accept") ?? 
new Headers(init?.headers).get("Accept"); + return typeof accept === "string" && accept.toLowerCase().includes("text/event-stream"); +} + +function nativeFetchRef(): typeof globalThis.fetch { + if (typeof window !== "undefined" && typeof window.fetch === "function") return window.fetch.bind(window); + return globalThis.fetch as typeof globalThis.fetch; +} + +const createDesktopFetch = (auth?: OpencodeAuth) => { + const authHeader = resolveAuthHeader(auth); + const addAuth = (headers: Headers) => { + if (!authHeader || headers.has("Authorization")) return; + headers.set("Authorization", authHeader); + }; + + return (input: RequestInfo | URL, init?: RequestInit) => { + // Streams must go through the webview's native fetch to avoid the + // Tauri HTTP plugin's `fetch_read_body` hang on never-closing bodies. + const shouldStream = requestIsStreaming(input, init); + const underlyingFetch = shouldStream + ? nativeFetchRef() + : desktopFetch; + // Streams should never be timed out at the transport layer; the caller + // aborts via AbortSignal when the subscription unmounts. + const timeoutMs = shouldStream ? 0 : DEFAULT_OPENCODE_REQUEST_TIMEOUT_MS; + + if (input instanceof Request) { + const headers = new Headers(input.headers); + addAuth(headers); + const request = new Request(input, { headers }); + return fetchWithTimeout(underlyingFetch, request, undefined, timeoutMs); + } + + const headers = new Headers(init?.headers); + addAuth(headers); + return fetchWithTimeout( + underlyingFetch, + input, + { + ...init, + headers, + }, + timeoutMs, + ); + }; +}; + +export function unwrap(result: FieldsResult): NonNullable { + if (result.data !== undefined) { + return result.data as NonNullable; + } + const message = + result.error instanceof Error + ? result.error.message + : typeof result.error === "string" + ? 
result.error + : JSON.stringify(result.error); + throw new Error(message || "Unknown error"); +} + +export function createClient(baseUrl: string, directory?: string, auth?: OpencodeAuth) { + const headers: Record = {}; + if (!isDesktopRuntime()) { + const authHeader = resolveAuthHeader(auth); + if (authHeader) { + headers.Authorization = authHeader; + } + } + + const fetchImpl = isDesktopRuntime() + ? createDesktopFetch(auth) + : (input: RequestInfo | URL, init?: RequestInit) => + fetchWithTimeout(globalThis.fetch, input, init, DEFAULT_OPENCODE_REQUEST_TIMEOUT_MS); + const client = createOpencodeClient({ + baseUrl, + directory, + headers: Object.keys(headers).length ? headers : undefined, + fetch: fetchImpl, + }); + + const session = client.session as typeof client.session; + const openworkMount = auth?.mode === "openwork" ? resolveOpenworkWorkspaceMount(baseUrl) : null; + const openworkSessionClient = + openworkMount && auth?.token + ? createOpenworkServerClient({ baseUrl: openworkMount.baseUrl, token: auth.token }) + : null; + // TODO(2026-04-12): remove the old-server compatibility path here once all + // OpenWork servers expose the workspace-scoped session read APIs. 
+ const sessionOverrides = session as any as { + list: (parameters?: SessionListParameters, options?: { throwOnError?: boolean }) => Promise>; + get: (parameters: SessionLookupParameters, options?: { throwOnError?: boolean }) => Promise>; + messages: (parameters: SessionMessagesParameters, options?: { throwOnError?: boolean }) => Promise>>; + todo: (parameters: SessionLookupParameters, options?: { throwOnError?: boolean }) => Promise>; + promptAsync: (parameters: PromptAsyncParameters, options?: { throwOnError?: boolean }) => Promise>; + command: (parameters: CommandParameters, options?: { throwOnError?: boolean }) => Promise>; + }; + + const listOriginal = sessionOverrides.list.bind(session); + sessionOverrides.list = (parameters?: SessionListParameters, options?: { throwOnError?: boolean }) => { + if (!openworkMount || !openworkSessionClient) { + return listOriginal(parameters, options); + } + const query = new URLSearchParams(); + if (typeof parameters?.roots === "boolean") query.set("roots", String(parameters.roots)); + if (typeof parameters?.start === "number") query.set("start", String(parameters.start)); + if (parameters?.search?.trim()) query.set("search", parameters.search.trim()); + if (typeof parameters?.limit === "number") query.set("limit", String(parameters.limit)); + const url = `${openworkMount.baseUrl}/workspace/${encodeURIComponent(openworkMount.workspaceId)}/sessions${query.size ? 
`?${query.toString()}` : ""}`; + return wrapOpenworkReadWithFallback( + url, + async () => (await openworkSessionClient.listSessions(openworkMount.workspaceId, parameters)).items, + () => listOriginal(parameters, options), + options, + ); + }; + + const getOriginal = sessionOverrides.get.bind(session); + sessionOverrides.get = (parameters: SessionLookupParameters, options?: { throwOnError?: boolean }) => { + if (!openworkMount || !openworkSessionClient) { + return getOriginal(parameters, options); + } + const url = `${openworkMount.baseUrl}/workspace/${encodeURIComponent(openworkMount.workspaceId)}/sessions/${encodeURIComponent(parameters.sessionID)}`; + return wrapOpenworkReadWithFallback( + url, + async () => (await openworkSessionClient.getSession(openworkMount.workspaceId, parameters.sessionID)).item, + () => getOriginal(parameters, options), + options, + ); + }; + + const messagesOriginal = sessionOverrides.messages.bind(session); + sessionOverrides.messages = (parameters: SessionMessagesParameters, options?: { throwOnError?: boolean }) => { + if (!openworkMount || !openworkSessionClient) { + return messagesOriginal(parameters, options); + } + const query = new URLSearchParams(); + if (typeof parameters.limit === "number") query.set("limit", String(parameters.limit)); + const url = `${openworkMount.baseUrl}/workspace/${encodeURIComponent(openworkMount.workspaceId)}/sessions/${encodeURIComponent(parameters.sessionID)}/messages${query.size ? 
`?${query.toString()}` : ""}`; + return wrapOpenworkReadWithFallback( + url, + async () => + (await openworkSessionClient.getSessionMessages(openworkMount.workspaceId, parameters.sessionID, { + limit: parameters.limit, + })).items, + () => messagesOriginal(parameters, options), + options, + ); + }; + + const todoOriginal = sessionOverrides.todo.bind(session); + sessionOverrides.todo = (parameters: SessionLookupParameters, options?: { throwOnError?: boolean }) => { + if (!openworkMount || !openworkSessionClient) { + return todoOriginal(parameters, options); + } + const url = `${openworkMount.baseUrl}/workspace/${encodeURIComponent(openworkMount.workspaceId)}/sessions/${encodeURIComponent(parameters.sessionID)}/snapshot`; + return wrapOpenworkReadWithFallback( + url, + async () => (await openworkSessionClient.getSessionSnapshot(openworkMount.workspaceId, parameters.sessionID)).item.todos, + () => todoOriginal(parameters, options), + options, + ); + }; + + const promptAsyncOriginal = sessionOverrides.promptAsync.bind(session); + sessionOverrides.promptAsync = (parameters: PromptAsyncParameters, options?: { throwOnError?: boolean }) => { + if (!("reasoning_effort" in parameters)) { + return promptAsyncOriginal(parameters, options); + } + const { sessionID, directory: requestDirectory, ...body } = parameters; + return postSessionRequest(fetchImpl, baseUrl, `/session/${encodeURIComponent(sessionID)}/prompt_async`, body, { + headers: Object.keys(headers).length ? headers : undefined, + directory: requestDirectory ?? 
directory, + throwOnError: options?.throwOnError, + }); + }; + + const commandOriginal = sessionOverrides.command.bind(session); + sessionOverrides.command = (parameters: CommandParameters, options?: { throwOnError?: boolean }) => { + if (!("reasoning_effort" in parameters)) { + return commandOriginal(parameters, options); + } + const { sessionID, directory: requestDirectory, ...body } = parameters; + return postSessionRequest(fetchImpl, baseUrl, `/session/${encodeURIComponent(sessionID)}/command`, body, { + headers: Object.keys(headers).length ? headers : undefined, + directory: requestDirectory ?? directory, + throwOnError: options?.throwOnError, + }); + }; + + return client; +} + +export async function waitForHealthy( + client: ReturnType, + options?: { timeoutMs?: number; pollMs?: number }, +) { + const timeoutMs = options?.timeoutMs ?? 10_000; + const pollMs = options?.pollMs ?? 250; + + const start = Date.now(); + let lastError: string | null = null; + + while (Date.now() - start < timeoutMs) { + try { + const health = unwrap(await client.global.health()); + if (health.healthy) { + return health; + } + lastError = "Server reported unhealthy"; + } catch (error) { + lastError = error instanceof Error ? error.message : "Unknown error"; + } + await new Promise((resolve) => setTimeout(resolve, pollMs)); + } + + throw new Error(lastError ?? "Timed out waiting for server health"); +} diff --git a/apps/app/src/app/lib/openwork-deployment.ts b/apps/app/src/app/lib/openwork-deployment.ts new file mode 100644 index 0000000000..77dd4bc742 --- /dev/null +++ b/apps/app/src/app/lib/openwork-deployment.ts @@ -0,0 +1,25 @@ +export const OPENWORK_DEPLOYMENT_ENV_VAR = "VITE_OPENWORK_DEPLOYMENT"; + +export type OpenWorkDeployment = "desktop" | "web"; + +function normalizeDeployment(value: string | undefined): OpenWorkDeployment { + const normalized = value?.trim().toLowerCase(); + return normalized === "web" ? 
"web" : "desktop"; +} + +export function getOpenWorkDeployment(): OpenWorkDeployment { + const envValue = + typeof import.meta !== "undefined" && typeof import.meta.env?.VITE_OPENWORK_DEPLOYMENT === "string" + ? import.meta.env.VITE_OPENWORK_DEPLOYMENT + : undefined; + + return normalizeDeployment(envValue); +} + +export function isWebDeployment(): boolean { + return getOpenWorkDeployment() === "web"; +} + +export function isDesktopDeployment(): boolean { + return getOpenWorkDeployment() === "desktop"; +} diff --git a/apps/app/src/app/lib/openwork-env-runtime.ts b/apps/app/src/app/lib/openwork-env-runtime.ts new file mode 100644 index 0000000000..9999b9629f --- /dev/null +++ b/apps/app/src/app/lib/openwork-env-runtime.ts @@ -0,0 +1,88 @@ +const PENDING_CHANGES_KEY = "openwork.settings.environment.pendingChanges"; + +type PendingChangesState = { + pending: boolean; + runtimeKey?: string; +}; + +function getStorage(kind: "localStorage" | "sessionStorage"): Storage | null { + if (typeof window === "undefined") return null; + try { + return window[kind] ?? null; + } catch { + return null; + } +} + +function parsePendingChangesState(raw: string | null): PendingChangesState { + if (!raw) return { pending: false }; + if (raw === "1") return { pending: true }; + try { + const parsed = JSON.parse(raw) as { pending?: unknown; runtimeKey?: unknown }; + return { + pending: parsed.pending === true, + runtimeKey: typeof parsed.runtimeKey === "string" && parsed.runtimeKey.trim() + ? parsed.runtimeKey.trim() + : undefined, + }; + } catch { + return { pending: false }; + } +} + +export function buildOpenworkEnvRuntimeKey(input: { + baseUrl?: string | null; + pid?: number | null; + port?: number | null; +}): string | undefined { + const baseUrl = (input.baseUrl?.trim() ?? "").replace(/\/+$/, ""); + const pid = typeof input.pid === "number" && Number.isFinite(input.pid) && input.pid > 0 + ? 
`pid:${input.pid}` + : ""; + const port = !pid && typeof input.port === "number" && Number.isFinite(input.port) && input.port > 0 + ? `port:${input.port}` + : ""; + const runtime = pid || port; + if (!baseUrl && !runtime) return undefined; + return `${baseUrl || "openwork"}::${runtime || "runtime"}`; +} + +export function readOpenworkEnvPendingChanges(runtimeKey?: string | null): boolean { + const localStorage = getStorage("localStorage"); + const sessionStorage = getStorage("sessionStorage"); + const state = parsePendingChangesState(localStorage?.getItem(PENDING_CHANGES_KEY) ?? null); + const legacySessionState = parsePendingChangesState( + sessionStorage?.getItem(PENDING_CHANGES_KEY) ?? null, + ); + const pending = state.pending ? state : legacySessionState; + if (!pending.pending) return false; + + const currentRuntimeKey = runtimeKey?.trim() || undefined; + if (currentRuntimeKey && pending.runtimeKey && pending.runtimeKey !== currentRuntimeKey) { + writeOpenworkEnvPendingChanges(false); + return false; + } + + return true; +} + +export function writeOpenworkEnvPendingChanges(value: boolean, runtimeKey?: string | null): void { + const localStorage = getStorage("localStorage"); + const sessionStorage = getStorage("sessionStorage"); + try { + if (value) { + const payload = { + pending: true, + changedAt: Date.now(), + ...(runtimeKey?.trim() ? 
{ runtimeKey: runtimeKey.trim() } : {}), + }; + localStorage?.setItem(PENDING_CHANGES_KEY, JSON.stringify(payload)); + sessionStorage?.removeItem(PENDING_CHANGES_KEY); + } else { + localStorage?.removeItem(PENDING_CHANGES_KEY); + sessionStorage?.removeItem(PENDING_CHANGES_KEY); + } + } catch { + // ignore persistence failures + } +} diff --git a/apps/app/src/app/lib/openwork-links.ts b/apps/app/src/app/lib/openwork-links.ts new file mode 100644 index 0000000000..b13ad8fca3 --- /dev/null +++ b/apps/app/src/app/lib/openwork-links.ts @@ -0,0 +1,206 @@ +import { DEFAULT_DEN_BASE_URL, normalizeDenBaseUrl } from "./den"; +import { normalizeOpenworkServerUrl } from "./openwork-server"; +import { normalizeBundleImportIntent, parseBundleDeepLink } from "../bundles/sources"; +import type { BundleRequest } from "../bundles/types"; + +export type RemoteWorkspaceDefaults = { + openworkHostUrl?: string | null; + openworkToken?: string | null; + directory?: string | null; + displayName?: string | null; + autoConnect?: boolean; +}; + +export type DenAuthDeepLink = { + grant: string; + denBaseUrl: string; +}; + +function isSupportedDeepLinkProtocol(protocol: string): boolean { + const normalized = protocol.toLowerCase(); + return normalized === "openwork:" || normalized === "openwork-dev:" || normalized === "https:" || normalized === "http:"; +} + +export function parseRemoteConnectDeepLink(rawUrl: string): RemoteWorkspaceDefaults | null { + let url: URL; + try { + url = new URL(rawUrl); + } catch { + return null; + } + + const protocol = url.protocol.toLowerCase(); + if (!isSupportedDeepLinkProtocol(protocol)) { + return null; + } + + const routeHost = url.hostname.toLowerCase(); + const routePath = url.pathname.replace(/^\/+/, "").toLowerCase(); + const routeSegments = routePath.split("/").filter(Boolean); + const routeTail = routeSegments[routeSegments.length - 1] ?? 
""; + if (routeHost !== "connect-remote" && routePath !== "connect-remote" && routeTail !== "connect-remote") { + return null; + } + + const hostUrlRaw = url.searchParams.get("openworkHostUrl") ?? url.searchParams.get("openworkUrl") ?? ""; + const tokenRaw = url.searchParams.get("openworkToken") ?? url.searchParams.get("accessToken") ?? ""; + const normalizedHostUrl = normalizeOpenworkServerUrl(hostUrlRaw); + const token = tokenRaw.trim(); + if (!normalizedHostUrl || !token) { + return null; + } + + const workerName = url.searchParams.get("workerName")?.trim() ?? ""; + const workerId = url.searchParams.get("workerId")?.trim() ?? ""; + const displayName = workerName || (workerId ? `Worker ${workerId.slice(0, 8)}` : ""); + const autoConnectRaw = + url.searchParams.get("autoConnect") ?? + url.searchParams.get("bypassModal") ?? + url.searchParams.get("bypassAddWorkerModal") ?? + ""; + const autoConnect = ["1", "true", "yes", "on"].includes(autoConnectRaw.trim().toLowerCase()); + + return { + openworkHostUrl: normalizedHostUrl, + openworkToken: token, + directory: null, + displayName: displayName || null, + autoConnect, + }; +} + +export function stripRemoteConnectQuery(rawUrl: string): string | null { + let url: URL; + try { + url = new URL(rawUrl); + } catch { + return null; + } + + let changed = false; + for (const key of [ + "openworkHostUrl", + "openworkUrl", + "openworkToken", + "accessToken", + "workerId", + "workerName", + "autoConnect", + "bypassModal", + "bypassAddWorkerModal", + "source", + ]) { + if (url.searchParams.has(key)) { + url.searchParams.delete(key); + changed = true; + } + } + + if (!changed) { + return null; + } + + const search = url.searchParams.toString(); + return `${url.pathname}${search ? 
`?${search}` : ""}${url.hash}`; +} + +export function parseDenAuthDeepLink(rawUrl: string): DenAuthDeepLink | null { + let url: URL; + try { + url = new URL(rawUrl); + } catch { + return null; + } + + const protocol = url.protocol.toLowerCase(); + if (!isSupportedDeepLinkProtocol(protocol)) { + return null; + } + + const routeHost = url.hostname.toLowerCase(); + const routePath = url.pathname.replace(/^\/+/, "").toLowerCase(); + const routeSegments = routePath.split("/").filter(Boolean); + const routeTail = routeSegments[routeSegments.length - 1] ?? ""; + if (routeHost !== "den-auth" && routePath !== "den-auth" && routeTail !== "den-auth") { + return null; + } + + const grant = url.searchParams.get("grant")?.trim() ?? ""; + const denBaseUrl = normalizeDenBaseUrl(url.searchParams.get("denBaseUrl")?.trim() ?? "") ?? DEFAULT_DEN_BASE_URL; + if (!grant) { + return null; + } + + return { grant, denBaseUrl }; +} + +function normalizeDebugDeepLinkInput(rawValue: string): string { + const trimmed = rawValue.trim(); + if (!trimmed) return ""; + + const directMatch = trimmed.match(/(?:openwork-dev|openwork|https?):\/\/[^\s"'<>]+/i); + if (directMatch) return directMatch[0]; + + const bareShareMatch = trimmed.match(/share\.openwork(?:labs\.com|\.software)\/b\/[^\s"'<>]+/i); + if (bareShareMatch) return `https://${bareShareMatch[0]}`; + + return trimmed; +} + +export function parseDebugDeepLinkInput(rawValue: string): + | { kind: "bundle"; link: BundleRequest } + | { kind: "remote"; link: RemoteWorkspaceDefaults } + | { kind: "auth"; link: DenAuthDeepLink } + | null { + const normalized = normalizeDebugDeepLinkInput(rawValue); + if (!normalized) return null; + + const denAuthLink = parseDenAuthDeepLink(normalized); + if (denAuthLink) { + return { kind: "auth", link: denAuthLink }; + } + + const bundleLink = parseBundleDeepLink(normalized); + if (bundleLink) { + return { kind: "bundle", link: bundleLink }; + } + + const remoteConnectLink = 
parseRemoteConnectDeepLink(normalized); + if (remoteConnectLink) { + return { kind: "remote", link: remoteConnectLink }; + } + + const bundleMatch = normalized.match(/ow_bundle=([^&\s]+)/i); + if (bundleMatch?.[1]) { + try { + const bundleUrl = decodeURIComponent(bundleMatch[1]); + const intentMatch = normalized.match(/(?:ow_intent|intent)=([^&\s]+)/i); + const labelMatch = normalized.match(/ow_label=([^&\s]+)/i); + const sourceMatch = normalized.match(/(?:ow_source|source)=([^&\s]+)/i); + return { + kind: "bundle", + link: { + bundleUrl, + intent: normalizeBundleImportIntent(intentMatch?.[1] ? decodeURIComponent(intentMatch[1]) : undefined), + label: labelMatch?.[1] ? decodeURIComponent(labelMatch[1]) : undefined, + source: sourceMatch?.[1] ? decodeURIComponent(sourceMatch[1]) : undefined, + }, + }; + } catch { + // ignore fallback parsing errors + } + } + + const shareIdMatch = normalized.match(/share\.openwork(?:labs\.com|\.software)\/b\/([^\s/?#"'<>]+)/i); + if (shareIdMatch?.[1]) { + return { + kind: "bundle", + link: { + bundleUrl: `https://share.openworklabs.com/b/${shareIdMatch[1]}`, + intent: "new_worker", + }, + }; + } + + return null; +} diff --git a/apps/app/src/app/lib/openwork-server.ts b/apps/app/src/app/lib/openwork-server.ts new file mode 100644 index 0000000000..35737405d0 --- /dev/null +++ b/apps/app/src/app/lib/openwork-server.ts @@ -0,0 +1,1265 @@ +import type { Message, Part, Session, Todo } from "@opencode-ai/sdk/v2/client"; +import { desktopFetch } from "./desktop"; +import { isDesktopRuntime } from "../utils"; +import type { ExecResult, OpencodeConfigFile, WorkspaceInfo, WorkspaceList } from "./desktop"; + +export type OpenworkServerCapabilities = { + skills: { read: boolean; write: boolean; source: "openwork" | "opencode" }; + hub?: { + skills?: { + read: boolean; + install: boolean; + repo?: { owner: string; name: string; ref: string }; + }; + }; + plugins: { read: boolean; write: boolean }; + mcp: { read: boolean; write: boolean }; + 
commands: { read: boolean; write: boolean }; + config: { read: boolean; write: boolean }; + sandbox?: { enabled: boolean; backend: "none" | "docker" | "container" }; + proxy?: { opencode: boolean }; + toolProviders?: { + browser?: { + enabled: boolean; + placement: "in-sandbox" | "host-machine" | "client-machine" | "external"; + mode: "none" | "headless" | "interactive"; + }; + files?: { + injection: boolean; + outbox: boolean; + inboxPath: string; + outboxPath: string; + maxBytes: number; + }; + }; +}; + +export type OpenworkServerStatus = "connected" | "disconnected" | "limited"; + +export type OpenworkServerDiagnostics = { + ok: boolean; + version: string; + uptimeMs: number; + readOnly: boolean; + approval: { mode: "manual" | "auto"; timeoutMs: number }; + corsOrigins: string[]; + workspaceCount: number; + activeWorkspaceId?: string | null; + selectedWorkspaceId?: string | null; + workspace: OpenworkWorkspaceInfo | null; + authorizedRoots: string[]; + server: { host: string; port: number; configPath?: string | null }; + tokenSource: { client: string; host: string }; +}; + +export type OpenworkRuntimeServiceName = "openwork-server" | "opencode"; + +export type OpenworkRuntimeServiceSnapshot = { + name: OpenworkRuntimeServiceName; + enabled: boolean; + running: boolean; + targetVersion: string | null; + actualVersion: string | null; + upgradeAvailable: boolean; +}; + +export type OpenworkRuntimeSnapshot = { + ok: boolean; + orchestrator?: { + version: string; + startedAt: number; + }; + worker?: { + workspace: string; + sandboxMode: string; + }; + upgrade?: { + status: "idle" | "running" | "failed"; + startedAt: number | null; + finishedAt: number | null; + error: string | null; + operationId: string | null; + services: OpenworkRuntimeServiceName[]; + }; + services: OpenworkRuntimeServiceSnapshot[]; +}; + +export type OpenworkServerSettings = { + urlOverride?: string; + portOverride?: number; + token?: string; + hostToken?: string; + remoteAccessEnabled?: 
boolean; +}; + +export type OpenworkWorkspaceInfo = WorkspaceInfo & { + opencode?: { + baseUrl?: string; + directory?: string; + username?: string; + password?: string; + }; +}; + +export type OpenworkWorkspaceList = { + items: OpenworkWorkspaceInfo[]; + workspaces?: WorkspaceInfo[]; + activeId?: string | null; +}; + +export type OpenworkSessionMessage = { + info: Message; + parts: Part[]; +}; + +export type OpenworkSessionSnapshot = { + session: Session; + messages: OpenworkSessionMessage[]; + todos: Todo[]; + status: + | { type: "idle" } + | { type: "busy" } + | { type: "retry"; attempt: number; message: string; next: number }; +}; + +export type OpenworkPluginItem = { + spec: string; + source: "config" | "dir.project" | "dir.global"; + scope: "project" | "global"; + path?: string; +}; + +export type OpenworkSkillItem = { + name: string; + path: string; + description: string; + scope: "project" | "global"; + trigger?: string; +}; + +export type OpenworkSkillContent = { + item: OpenworkSkillItem; + content: string; +}; + +export type OpenworkHubSkillItem = { + name: string; + description: string; + trigger?: string; + source: { + owner: string; + repo: string; + ref: string; + path: string; + }; +}; + +export type OpenworkHubRepo = { + owner?: string; + repo?: string; + ref?: string; +}; + +export type OpenworkWorkspaceFileContent = { + path: string; + content: string; + bytes: number; + updatedAt: number; +}; + +export type OpenworkWorkspaceFileWriteResult = { + ok: boolean; + path: string; + bytes: number; + updatedAt: number; + revision?: string; +}; + +export type OpenworkCommandItem = { + name: string; + description?: string; + template: string; + agent?: string; + model?: string | null; + subtask?: boolean; + scope: "workspace" | "global"; +}; + +export type OpenworkMcpItem = { + name: string; + config: Record; + source: "config.project" | "config.global" | "config.remote"; + disabledByTools?: boolean; +}; + +export type OpenworkWorkspaceExport = { + 
workspaceId: string; + exportedAt: number; + opencode?: Record; + openwork?: Record; + skills?: Array<{ name: string; description?: string; trigger?: string; content: string }>; + commands?: Array<{ name: string; description?: string; template?: string }>; + files?: Array<{ path: string; content: string }>; +}; + +export type OpenworkWorkspaceImportChange = { + kind: "opencode" | "openwork" | "skill" | "command" | "file"; + action: "create" | "update" | "replace" | "delete" | "unchanged"; + label: string; + path: string; +}; + +export type OpenworkWorkspaceImportPreview = { + fingerprint: string; + summary: { + total: number; + create: number; + update: number; + replace: number; + delete: number; + unchanged: number; + }; + changes: OpenworkWorkspaceImportChange[]; +}; + +export type OpenworkWorkspaceExportSensitiveMode = "auto" | "include" | "exclude"; + +export type OpenworkWorkspaceExportWarning = { + id: string; + label: string; + detail: string; +}; + +export type OpenworkBlueprintSessionsMaterializeResult = { + ok: boolean; + created: Array<{ templateId: string; sessionId: string; title: string }>; + existing: Array<{ templateId: string; sessionId: string }>; + openSessionId: string | null; +}; + +export type OpenworkArtifactItem = { + id: string; + name?: string; + path?: string; + size?: number; + createdAt?: number; + updatedAt?: number; + mime?: string; +}; + +export type OpenworkArtifactList = { + items: OpenworkArtifactItem[]; +}; + +export type OpenworkInboxItem = { + id: string; + name?: string; + path?: string; + size?: number; + updatedAt?: number; +}; + +export type OpenworkInboxList = { + items: OpenworkInboxItem[]; +}; + +export type OpenworkInboxUploadResult = { + ok: boolean; + path: string; + bytes: number; +}; + +export type OpenworkActor = { + type: "remote" | "host"; + clientId?: string; + tokenHash?: string; +}; + +export type OpenworkAuditEntry = { + id: string; + workspaceId: string; + actor: OpenworkActor; + action: string; + target: 
string; + summary: string; + timestamp: number; +}; + +export type OpenworkReloadTrigger = { + type: "skill" | "plugin" | "config" | "mcp" | "agent" | "command"; + name?: string; + action?: "added" | "removed" | "updated"; + path?: string; +}; + +export type OpenworkReloadEvent = { + id: string; + seq: number; + workspaceId: string; + reason: "plugins" | "skills" | "mcp" | "config" | "agents" | "commands"; + trigger?: OpenworkReloadTrigger; + timestamp: number; +}; + +// Fallback for explicit server-mode URL derivation. Desktop local workers replace this +// with the persisted runtime-discovered port once the host reports it. +export const DEFAULT_OPENWORK_SERVER_PORT = 8787; + +const STORAGE_URL_OVERRIDE = "openwork.server.urlOverride"; +const STORAGE_PORT_OVERRIDE = "openwork.server.port"; +const STORAGE_TOKEN = "openwork.server.token"; +const STORAGE_HOST_TOKEN = "openwork.server.hostToken"; +const STORAGE_REMOTE_ACCESS = "openwork.server.remoteAccessEnabled"; + +export function normalizeOpenworkServerUrl(input: string) { + const trimmed = input.trim(); + if (!trimmed) return null; + const withProtocol = /^https?:\/\//.test(trimmed) ? trimmed : `http://${trimmed}`; + return withProtocol.replace(/\/+$/, ""); +} + +export function isLoopbackOpenworkServerUrl(input: string) { + const normalized = normalizeOpenworkServerUrl(input) ?? ""; + if (!normalized) return false; + try { + const hostname = new URL(normalized).hostname.toLowerCase(); + return hostname === "localhost" || hostname === "127.0.0.1" || hostname === "::1" || hostname === "[::1]"; + } catch { + return false; + } +} + +export function parseOpenworkWorkspaceIdFromUrl(input: string) { + const normalized = normalizeOpenworkServerUrl(input) ?? ""; + if (!normalized) return null; + + try { + const url = new URL(normalized); + const segments = url.pathname.split("/").filter(Boolean); + const last = segments[segments.length - 1] ?? ""; + const prev = segments[segments.length - 2] ?? 
""; + if (prev !== "w" || !last) return null; + return decodeURIComponent(last); + } catch { + const match = normalized.match(/\/w\/([^/?#]+)/); + if (!match?.[1]) return null; + try { + return decodeURIComponent(match[1]); + } catch { + return match[1]; + } + } +} + +export function buildOpenworkWorkspaceBaseUrl(hostUrl: string, workspaceId?: string | null) { + const normalized = normalizeOpenworkServerUrl(hostUrl) ?? ""; + if (!normalized) return null; + + try { + const url = new URL(normalized); + const segments = url.pathname.split("/").filter(Boolean); + const last = segments[segments.length - 1] ?? ""; + const prev = segments[segments.length - 2] ?? ""; + const alreadyMounted = prev === "w" && Boolean(last); + if (alreadyMounted) { + return url.toString().replace(/\/+$/, ""); + } + + const id = (workspaceId ?? "").trim(); + if (!id) return url.toString().replace(/\/+$/, ""); + + const basePath = url.pathname.replace(/\/+$/, ""); + url.pathname = `${basePath}/w/${encodeURIComponent(id)}`; + return url.toString().replace(/\/+$/, ""); + } catch { + const id = (workspaceId ?? "").trim(); + if (!id) return normalized; + return `${normalized.replace(/\/+$/, "")}/w/${encodeURIComponent(id)}`; + } +} + +const OPENWORK_INVITE_PARAM_URL = "ow_url"; +const OPENWORK_INVITE_PARAM_TOKEN = "ow_token"; +const OPENWORK_INVITE_PARAM_STARTUP = "ow_startup"; +const OPENWORK_INVITE_PARAM_AUTO_CONNECT = "ow_auto_connect"; + +export type OpenworkConnectInvite = { + url: string; + token?: string; + startup?: "server"; + autoConnect?: boolean; +}; + +export function readOpenworkConnectInviteFromSearch(input: string | URLSearchParams) { + const search = + typeof input === "string" + ? new URLSearchParams(input.startsWith("?") ? input.slice(1) : input) + : input; + + const rawUrl = search.get(OPENWORK_INVITE_PARAM_URL)?.trim() ?? ""; + const url = normalizeOpenworkServerUrl(rawUrl); + if (!url) return null; + + const token = search.get(OPENWORK_INVITE_PARAM_TOKEN)?.trim() ?? 
""; + const startupRaw = search.get(OPENWORK_INVITE_PARAM_STARTUP)?.trim() ?? ""; + const startup = startupRaw === "server" ? "server" : undefined; + const autoConnect = search.get(OPENWORK_INVITE_PARAM_AUTO_CONNECT)?.trim() === "1"; + + return { + url, + token: token || undefined, + startup, + autoConnect: autoConnect || undefined, + } satisfies OpenworkConnectInvite; +} + +export function stripOpenworkConnectInviteFromUrl(input: string) { + try { + const url = new URL(input); + url.searchParams.delete(OPENWORK_INVITE_PARAM_URL); + url.searchParams.delete(OPENWORK_INVITE_PARAM_TOKEN); + url.searchParams.delete(OPENWORK_INVITE_PARAM_STARTUP); + url.searchParams.delete(OPENWORK_INVITE_PARAM_AUTO_CONNECT); + return url.toString(); + } catch { + return input; + } +} + +export function readOpenworkServerSettings(): OpenworkServerSettings { + if (typeof window === "undefined") return {}; + try { + const urlOverride = normalizeOpenworkServerUrl( + window.localStorage.getItem(STORAGE_URL_OVERRIDE) ?? "", + ); + const portRaw = window.localStorage.getItem(STORAGE_PORT_OVERRIDE) ?? ""; + const portOverride = portRaw ? Number(portRaw) : undefined; + const token = window.localStorage.getItem(STORAGE_TOKEN) ?? undefined; + const hostToken = window.localStorage.getItem(STORAGE_HOST_TOKEN) ?? undefined; + const remoteAccessRaw = window.localStorage.getItem(STORAGE_REMOTE_ACCESS) ?? ""; + return { + urlOverride: urlOverride ?? undefined, + portOverride: Number.isNaN(portOverride) ? undefined : portOverride, + token: token?.trim() || undefined, + hostToken: hostToken?.trim() || undefined, + remoteAccessEnabled: remoteAccessRaw === "1", + }; + } catch { + return {}; + } +} + +export function writeOpenworkServerSettings(next: OpenworkServerSettings): OpenworkServerSettings { + if (typeof window === "undefined") return next; + try { + const urlOverride = normalizeOpenworkServerUrl(next.urlOverride ?? ""); + const portOverride = typeof next.portOverride === "number" ? 
next.portOverride : undefined; + const token = next.token?.trim() || undefined; + const hostToken = next.hostToken?.trim() || undefined; + const remoteAccessEnabled = next.remoteAccessEnabled === true; + + if (urlOverride) { + window.localStorage.setItem(STORAGE_URL_OVERRIDE, urlOverride); + } else { + window.localStorage.removeItem(STORAGE_URL_OVERRIDE); + } + + if (typeof portOverride === "number" && !Number.isNaN(portOverride)) { + window.localStorage.setItem(STORAGE_PORT_OVERRIDE, String(portOverride)); + } else { + window.localStorage.removeItem(STORAGE_PORT_OVERRIDE); + } + + if (token) { + window.localStorage.setItem(STORAGE_TOKEN, token); + } else { + window.localStorage.removeItem(STORAGE_TOKEN); + } + + if (hostToken) { + window.localStorage.setItem(STORAGE_HOST_TOKEN, hostToken); + } else { + window.localStorage.removeItem(STORAGE_HOST_TOKEN); + } + + if (remoteAccessEnabled) { + window.localStorage.setItem(STORAGE_REMOTE_ACCESS, "1"); + } else { + window.localStorage.removeItem(STORAGE_REMOTE_ACCESS); + } + + return readOpenworkServerSettings(); + } catch { + return next; + } +} + +export function hydrateOpenworkServerSettingsFromEnv() { + if (typeof window === "undefined") return; + + const envUrl = typeof import.meta.env?.VITE_OPENWORK_URL === "string" + ? import.meta.env.VITE_OPENWORK_URL.trim() + : ""; + const envPort = typeof import.meta.env?.VITE_OPENWORK_PORT === "string" + ? import.meta.env.VITE_OPENWORK_PORT.trim() + : ""; + const envToken = typeof import.meta.env?.VITE_OPENWORK_TOKEN === "string" + ? import.meta.env.VITE_OPENWORK_TOKEN.trim() + : ""; + const envHostToken = typeof import.meta.env?.VITE_OPENWORK_HOST_TOKEN === "string" + ? 
import.meta.env.VITE_OPENWORK_HOST_TOKEN.trim() + : ""; + + if (!envUrl && !envPort && !envToken && !envHostToken) return; + + try { + const current = readOpenworkServerSettings(); + const next: OpenworkServerSettings = { ...current }; + let changed = false; + + if (!current.urlOverride && envUrl) { + next.urlOverride = normalizeOpenworkServerUrl(envUrl) ?? undefined; + changed = true; + } + + if (!current.portOverride && envPort) { + const parsed = Number(envPort); + if (Number.isFinite(parsed) && parsed > 0) { + next.portOverride = parsed; + changed = true; + } + } + + if (!current.token && envToken) { + next.token = envToken; + changed = true; + } + + if (!current.hostToken && envHostToken) { + next.hostToken = envHostToken; + changed = true; + } + + if (changed) { + writeOpenworkServerSettings(next); + } + } catch { + // ignore + } +} + +export function clearOpenworkServerSettings() { + if (typeof window === "undefined") return; + try { + window.localStorage.removeItem(STORAGE_URL_OVERRIDE); + window.localStorage.removeItem(STORAGE_PORT_OVERRIDE); + window.localStorage.removeItem(STORAGE_TOKEN); + window.localStorage.removeItem(STORAGE_HOST_TOKEN); + window.localStorage.removeItem(STORAGE_REMOTE_ACCESS); + } catch { + // ignore + } +} + +export class OpenworkServerError extends Error { + status: number; + code: string; + details?: unknown; + + constructor(status: number, code: string, message: string, details?: unknown) { + super(message); + this.status = status; + this.code = code; + this.details = details; + } +} + +function buildHeaders( + token?: string, + hostToken?: string, + extra?: Record, +) { + const headers: Record = { "Content-Type": "application/json" }; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + if (hostToken) { + headers["X-OpenWork-Host-Token"] = hostToken; + } + if (extra) { + Object.assign(headers, extra); + } + return headers; +} + +function buildAuthHeaders(token?: string, hostToken?: string, extra?: Record) { + const 
headers: Record = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + if (hostToken) { + headers["X-OpenWork-Host-Token"] = hostToken; + } + if (extra) { + Object.assign(headers, extra); + } + return headers; +} + +// Use Tauri's fetch when running in the desktop app to avoid CORS issues. +// Stream URLs (SSE) bypass the plugin because its `fetch_read_body` IPC call +// blocks until the body closes — that freezes the webview for infinite bodies. +const OPENWORK_STREAM_URL_RE = /\/events(\b|\?)|\/event-stream\b|\/stream\b/; + +function isStreamUrl(url: string): boolean { + return OPENWORK_STREAM_URL_RE.test(url); +} + +const resolveFetch = (url?: string) => { + if (!isDesktopRuntime()) return globalThis.fetch; + if (url && isStreamUrl(url)) { + return typeof window !== "undefined" ? window.fetch.bind(window) : globalThis.fetch; + } + return desktopFetch; +}; + +const DEFAULT_OPENWORK_SERVER_TIMEOUT_MS = 10_000; + +type FetchLike = (input: RequestInfo | URL, init?: RequestInit) => Promise; + +async function fetchWithTimeout( + fetchImpl: FetchLike, + url: string, + init: RequestInit, + timeoutMs: number, +) { + if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) { + return fetchImpl(url, init); + } + + const controller = typeof AbortController !== "undefined" ? new AbortController() : null; + const signal = controller?.signal; + const initWithSignal = signal && !init.signal ? { ...init, signal } : init; + + let timeoutId: ReturnType | null = null; + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + try { + controller?.abort(); + } catch { + // ignore + } + reject(new Error("Request timed out.")); + }, timeoutMs); + }); + + try { + return await Promise.race([fetchImpl(url, initWithSignal), timeoutPromise]); + } catch (error) { + const name = (error && typeof error === "object" && "name" in error ? 
(error as any).name : "") as string; + if (name === "AbortError") { + throw new Error("Request timed out."); + } + throw error; + } finally { + if (timeoutId) clearTimeout(timeoutId); + } +} + +async function requestJson( + baseUrl: string, + path: string, + options: { method?: string; token?: string; hostToken?: string; body?: unknown; timeoutMs?: number } = {}, +): Promise { + const url = `${baseUrl}${path}`; + const fetchImpl = resolveFetch(url); + const response = await fetchWithTimeout( + fetchImpl, + url, + { + method: options.method ?? "GET", + headers: buildHeaders(options.token, options.hostToken), + body: options.body ? JSON.stringify(options.body) : undefined, + }, + options.timeoutMs ?? DEFAULT_OPENWORK_SERVER_TIMEOUT_MS, + ); + + const text = await response.text(); + const json = text ? JSON.parse(text) : null; + + if (!response.ok) { + const code = typeof json?.code === "string" ? json.code : "request_failed"; + const message = typeof json?.message === "string" ? json.message : response.statusText; + throw new OpenworkServerError(response.status, code, message, json?.details); + } + + return json as T; +} + +async function requestMultipartRaw( + baseUrl: string, + path: string, + options: { method?: string; token?: string; hostToken?: string; body?: FormData; timeoutMs?: number } = {}, +): Promise<{ ok: boolean; status: number; text: string }>{ + const url = `${baseUrl}${path}`; + const fetchImpl = resolveFetch(url); + const response = await fetchWithTimeout( + fetchImpl, + url, + { + method: options.method ?? "POST", + headers: buildAuthHeaders(options.token, options.hostToken), + body: options.body, + }, + options.timeoutMs ?? 
DEFAULT_OPENWORK_SERVER_TIMEOUT_MS, + ); + const text = await response.text(); + return { ok: response.ok, status: response.status, text }; +} + +async function requestBinary( + baseUrl: string, + path: string, + options: { method?: string; token?: string; hostToken?: string; timeoutMs?: number } = {}, +): Promise<{ data: ArrayBuffer; contentType: string | null; filename: string | null }>{ + const url = `${baseUrl}${path}`; + const fetchImpl = resolveFetch(url); + const response = await fetchWithTimeout( + fetchImpl, + url, + { + method: options.method ?? "GET", + headers: buildAuthHeaders(options.token, options.hostToken), + }, + options.timeoutMs ?? DEFAULT_OPENWORK_SERVER_TIMEOUT_MS, + ); + + if (!response.ok) { + const text = await response.text(); + let json: any = null; + try { + json = text ? JSON.parse(text) : null; + } catch { + json = null; + } + const code = typeof json?.code === "string" ? json.code : "request_failed"; + const message = typeof json?.message === "string" ? json.message : response.statusText; + throw new OpenworkServerError(response.status, code, message, json?.details); + } + + const contentType = response.headers.get("content-type"); + const disposition = response.headers.get("content-disposition") ?? ""; + const filenameMatch = disposition.match(/filename\*=UTF-8''([^;]+)|filename="?([^";]+)"?/i); + const filenameRaw = filenameMatch?.[1] ?? filenameMatch?.[2] ?? null; + const filename = filenameRaw ? 
decodeURIComponent(filenameRaw) : null; + const data = await response.arrayBuffer(); + return { data, contentType, filename }; +} + +export function createOpenworkServerClient(options: { baseUrl: string; token?: string; hostToken?: string }) { + const baseUrl = options.baseUrl.replace(/\/+$/, ""); + const token = options.token; + const hostToken = options.hostToken; + + const timeouts = { + health: 3_000, + capabilities: 6_000, + listWorkspaces: 8_000, + activateWorkspace: 10_000, + deleteWorkspace: 10_000, + deleteSession: 12_000, + sessionRead: 12_000, + status: 6_000, + config: 10_000, + workspaceExport: 30_000, + workspaceImport: 30_000, + shareBundle: 20_000, + binary: 60_000, + }; + + return { + baseUrl, + token, + health: () => + requestJson<{ ok: boolean; version: string; uptimeMs: number }>(baseUrl, "/health", { token, hostToken, timeoutMs: timeouts.health }), + runtimeVersions: () => + requestJson(baseUrl, "/runtime/versions", { token, hostToken, timeoutMs: timeouts.status }), + status: () => requestJson(baseUrl, "/status", { token, hostToken, timeoutMs: timeouts.status }), + capabilities: () => requestJson(baseUrl, "/capabilities", { token, hostToken, timeoutMs: timeouts.capabilities }), + listWorkspaces: () => requestJson(baseUrl, "/workspaces", { token, hostToken, timeoutMs: timeouts.listWorkspaces }), + createLocalWorkspace: (payload: { folderPath: string; name: string; preset: string }) => + requestJson(baseUrl, "/workspaces/local", { + token, + hostToken, + method: "POST", + body: payload, + timeoutMs: timeouts.activateWorkspace, + }), + updateWorkspaceDisplayName: (workspaceId: string, displayName: string | null) => + requestJson(baseUrl, `/workspaces/${encodeURIComponent(workspaceId)}/display-name`, { + token, + hostToken, + method: "PATCH", + body: { displayName }, + timeoutMs: timeouts.activateWorkspace, + }), + activateWorkspace: (workspaceId: string) => + requestJson<{ activeId: string; workspace: OpenworkWorkspaceInfo }>( + baseUrl, + 
`/workspaces/${encodeURIComponent(workspaceId)}/activate`, + { token, hostToken, method: "POST", timeoutMs: timeouts.activateWorkspace }, + ), + deleteWorkspace: (workspaceId: string) => + requestJson<{ ok: boolean; deleted: boolean; persisted: boolean; activeId: string | null; items: OpenworkWorkspaceInfo[]; workspaces?: WorkspaceInfo[] }>( + baseUrl, + `/workspaces/${encodeURIComponent(workspaceId)}`, + { token, hostToken, method: "DELETE", timeoutMs: timeouts.deleteWorkspace }, + ), + deleteSession: (workspaceId: string, sessionId: string) => + requestJson<{ ok: boolean }>( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/sessions/${encodeURIComponent(sessionId)}`, + { token, hostToken, method: "DELETE", timeoutMs: timeouts.deleteSession }, + ), + listSessions: ( + workspaceId: string, + options?: { roots?: boolean; start?: number; search?: string; limit?: number }, + ) => { + const query = new URLSearchParams(); + if (typeof options?.roots === "boolean") query.set("roots", String(options.roots)); + if (typeof options?.start === "number") query.set("start", String(options.start)); + if (options?.search?.trim()) query.set("search", options.search.trim()); + if (typeof options?.limit === "number") query.set("limit", String(options.limit)); + const suffix = query.size ? 
`?${query.toString()}` : ""; + return requestJson<{ items: Session[] }>( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/sessions${suffix}`, + { token, hostToken, timeoutMs: timeouts.sessionRead }, + ); + }, + getSession: (workspaceId: string, sessionId: string) => + requestJson<{ item: Session }>( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/sessions/${encodeURIComponent(sessionId)}`, + { token, hostToken, timeoutMs: timeouts.sessionRead }, + ), + getSessionMessages: (workspaceId: string, sessionId: string, options?: { limit?: number }) => { + const query = new URLSearchParams(); + if (typeof options?.limit === "number") query.set("limit", String(options.limit)); + const suffix = query.size ? `?${query.toString()}` : ""; + return requestJson<{ items: OpenworkSessionMessage[] }>( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/sessions/${encodeURIComponent(sessionId)}/messages${suffix}`, + { token, hostToken, timeoutMs: timeouts.sessionRead }, + ); + }, + getSessionSnapshot: (workspaceId: string, sessionId: string, options?: { limit?: number }) => { + const query = new URLSearchParams(); + if (typeof options?.limit === "number") query.set("limit", String(options.limit)); + const suffix = query.size ? `?${query.toString()}` : ""; + return requestJson<{ item: OpenworkSessionSnapshot }>( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/sessions/${encodeURIComponent(sessionId)}/snapshot${suffix}`, + { token, hostToken, timeoutMs: timeouts.sessionRead }, + ); + }, + exportWorkspace: ( + workspaceId: string, + options?: { sensitiveMode?: OpenworkWorkspaceExportSensitiveMode }, + ) => { + const query = new URLSearchParams(); + if (options?.sensitiveMode) { + query.set("sensitive", options.sensitiveMode); + } + const suffix = query.size ? 
`?${query.toString()}` : ""; + return requestJson(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/export${suffix}`, { + token, + hostToken, + timeoutMs: timeouts.workspaceExport, + }); + }, + importWorkspace: (workspaceId: string, payload: Record) => + requestJson<{ ok: boolean; preview?: OpenworkWorkspaceImportPreview }>(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/import`, { + token, + hostToken, + method: "POST", + body: payload, + timeoutMs: timeouts.workspaceImport, + }), + previewWorkspaceImport: (workspaceId: string, payload: Record) => + requestJson( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/import/preview`, + { + token, + hostToken, + method: "POST", + body: payload, + timeoutMs: timeouts.workspaceImport, + }, + ), + materializeBlueprintSessions: (workspaceId: string) => + requestJson( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/blueprint/sessions/materialize`, + { + token, + hostToken, + method: "POST", + timeoutMs: timeouts.workspaceImport, + }, + ), + publishBundle: (payload: unknown, bundleType: "skill" | "skills-set", options?: { name?: string; timeoutMs?: number }) => + requestJson<{ url: string }>(baseUrl, "/share/bundles/publish", { + token, + hostToken, + method: "POST", + body: { + payload, + bundleType, + name: options?.name, + timeoutMs: options?.timeoutMs, + }, + timeoutMs: options?.timeoutMs ?? timeouts.shareBundle, + }), + fetchBundle: (bundleUrl: string, options?: { timeoutMs?: number }) => + requestJson>(baseUrl, "/share/bundles/fetch", { + token, + hostToken, + method: "POST", + body: { + bundleUrl, + timeoutMs: options?.timeoutMs, + }, + timeoutMs: options?.timeoutMs ?? 
timeouts.shareBundle, + }), + getConfig: (workspaceId: string) => + requestJson<{ opencode: Record; openwork: Record; updatedAt?: number | null }>( + baseUrl, + `/workspace/${workspaceId}/config`, + { token, hostToken, timeoutMs: timeouts.config }, + ), + patchConfig: (workspaceId: string, payload: { opencode?: Record; openwork?: Record }) => + requestJson<{ updatedAt?: number | null }>(baseUrl, `/workspace/${workspaceId}/config`, { + token, + hostToken, + method: "PATCH", + body: payload, + }), + readOpencodeConfigFile: (workspaceId: string, scope: "project" | "global" = "project") => { + const query = `?scope=${scope}`; + return requestJson(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/opencode-config${query}`, { + token, + hostToken, + }); + }, + writeOpencodeConfigFile: (workspaceId: string, scope: "project" | "global", content: string) => + requestJson(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/opencode-config`, { + token, + hostToken, + method: "POST", + body: { scope, content }, + }), + listReloadEvents: (workspaceId: string, options?: { since?: number }) => { + const query = typeof options?.since === "number" ? `?since=${options.since}` : ""; + return requestJson<{ items: OpenworkReloadEvent[]; cursor?: number }>( + baseUrl, + `/workspace/${workspaceId}/events${query}`, + { token, hostToken }, + ); + }, + reloadEngine: (workspaceId: string) => + requestJson<{ ok: boolean; reloadedAt?: number }>(baseUrl, `/workspace/${workspaceId}/engine/reload`, { + token, + hostToken, + method: "POST", + }), + listPlugins: (workspaceId: string, options?: { includeGlobal?: boolean }) => { + const query = options?.includeGlobal ? 
"?includeGlobal=true" : ""; + return requestJson<{ items: OpenworkPluginItem[]; loadOrder: string[] }>( + baseUrl, + `/workspace/${workspaceId}/plugins${query}`, + { token, hostToken }, + ); + }, + addPlugin: (workspaceId: string, spec: string) => + requestJson<{ items: OpenworkPluginItem[]; loadOrder: string[] }>( + baseUrl, + `/workspace/${workspaceId}/plugins`, + { token, hostToken, method: "POST", body: { spec } }, + ), + removePlugin: (workspaceId: string, name: string) => + requestJson<{ items: OpenworkPluginItem[]; loadOrder: string[] }>( + baseUrl, + `/workspace/${workspaceId}/plugins/${encodeURIComponent(name)}`, + { token, hostToken, method: "DELETE" }, + ), + listSkills: (workspaceId: string, options?: { includeGlobal?: boolean }) => { + const query = options?.includeGlobal ? "?includeGlobal=true" : ""; + return requestJson<{ items: OpenworkSkillItem[] }>( + baseUrl, + `/workspace/${workspaceId}/skills${query}`, + { token, hostToken }, + ); + }, + listHubSkills: (options?: { repo?: OpenworkHubRepo }) => { + const params = new URLSearchParams(); + const owner = options?.repo?.owner?.trim(); + const repo = options?.repo?.repo?.trim(); + const ref = options?.repo?.ref?.trim(); + if (owner) params.set("owner", owner); + if (repo) params.set("repo", repo); + if (ref) params.set("ref", ref); + const query = params.size ? `?${params.toString()}` : ""; + return requestJson<{ items: OpenworkHubSkillItem[] }>(baseUrl, `/hub/skills${query}`, { + token, + hostToken, + }); + }, + installHubSkill: ( + workspaceId: string, + name: string, + options?: { overwrite?: boolean; repo?: { owner?: string; repo?: string; ref?: string } }, + ) => + requestJson<{ ok: boolean; name: string; path: string; action: "added" | "updated"; written: number; skipped: number }>( + baseUrl, + `/workspace/${workspaceId}/skills/hub/${encodeURIComponent(name)}`, + { + token, + hostToken, + method: "POST", + body: { + ...(options?.overwrite ? { overwrite: true } : {}), + ...(options?.repo ? 
{ repo: options.repo } : {}), + }, + }, + ), + getSkill: (workspaceId: string, name: string, options?: { includeGlobal?: boolean }) => { + const query = options?.includeGlobal ? "?includeGlobal=true" : ""; + return requestJson( + baseUrl, + `/workspace/${workspaceId}/skills/${encodeURIComponent(name)}${query}`, + { token, hostToken }, + ); + }, + upsertSkill: (workspaceId: string, payload: { name: string; content: string; description?: string }) => + requestJson(baseUrl, `/workspace/${workspaceId}/skills`, { + token, + hostToken, + method: "POST", + body: payload, + }), + deleteSkill: (workspaceId: string, name: string) => + requestJson<{ path: string }>( + baseUrl, + `/workspace/${workspaceId}/skills/${encodeURIComponent(name)}`, + { + token, + hostToken, + method: "DELETE", + }, + ), + listMcp: (workspaceId: string) => + requestJson<{ items: OpenworkMcpItem[] }>(baseUrl, `/workspace/${workspaceId}/mcp`, { token, hostToken }), + addMcp: (workspaceId: string, payload: { name: string; config: Record }) => + requestJson<{ items: OpenworkMcpItem[] }>(baseUrl, `/workspace/${workspaceId}/mcp`, { + token, + hostToken, + method: "POST", + body: payload, + }), + removeMcp: (workspaceId: string, name: string) => + requestJson<{ items: OpenworkMcpItem[] }>(baseUrl, `/workspace/${workspaceId}/mcp/${encodeURIComponent(name)}`, { + token, + hostToken, + method: "DELETE", + }), + setMcpEnabled: (workspaceId: string, name: string, enabled: boolean) => + requestJson<{ items: OpenworkMcpItem[] }>( + baseUrl, + `/workspace/${workspaceId}/mcp/${encodeURIComponent(name)}/enabled`, + { + token, + hostToken, + method: "POST", + body: { enabled }, + }, + ), + + logoutMcpAuth: (workspaceId: string, name: string) => + requestJson<{ ok: true }>(baseUrl, `/workspace/${workspaceId}/mcp/${encodeURIComponent(name)}/auth`, { + token, + hostToken, + method: "DELETE", + }), + + listCommands: (workspaceId: string, scope: "workspace" | "global" = "workspace") => + requestJson<{ items: 
OpenworkCommandItem[] }>( + baseUrl, + `/workspace/${workspaceId}/commands?scope=${scope}`, + { token, hostToken }, + ), + listAudit: (workspaceId: string, limit = 50) => + requestJson<{ items: OpenworkAuditEntry[] }>( + baseUrl, + `/workspace/${workspaceId}/audit?limit=${limit}`, + { token, hostToken }, + ), + upsertCommand: ( + workspaceId: string, + payload: { name: string; description?: string; template: string; agent?: string; model?: string | null; subtask?: boolean }, + ) => + requestJson<{ items: OpenworkCommandItem[] }>(baseUrl, `/workspace/${workspaceId}/commands`, { + token, + hostToken, + method: "POST", + body: payload, + }), + deleteCommand: (workspaceId: string, name: string) => + requestJson<{ ok: boolean }>(baseUrl, `/workspace/${workspaceId}/commands/${encodeURIComponent(name)}`, { + token, + hostToken, + method: "DELETE", + }), + uploadInbox: async (workspaceId: string, file: File, options?: { path?: string }) => { + const id = workspaceId.trim(); + if (!id) throw new Error("workspaceId is required"); + if (!file) throw new Error("file is required"); + const form = new FormData(); + form.append("file", file); + if (options?.path?.trim()) { + form.append("path", options.path.trim()); + } + + const result = await requestMultipartRaw(baseUrl, `/workspace/${encodeURIComponent(id)}/inbox`, { + token, + hostToken, + method: "POST", + body: form, + timeoutMs: timeouts.binary, + }); + + if (!result.ok) { + let message = result.text.trim(); + try { + const json = message ? JSON.parse(message) : null; + if (json && typeof json.message === "string") { + message = json.message; + } + } catch { + // ignore + } + throw new OpenworkServerError( + result.status, + "request_failed", + message || "Shared folder upload failed", + ); + } + + const body = result.text.trim(); + if (body) { + try { + const parsed = JSON.parse(body) as Partial; + if (typeof parsed.path === "string" && parsed.path.trim()) { + return { + ok: parsed.ok ?? 
true, + path: parsed.path.trim(), + bytes: typeof parsed.bytes === "number" ? parsed.bytes : file.size, + } satisfies OpenworkInboxUploadResult; + } + } catch { + // ignore invalid JSON and fall back + } + } + + return { + ok: true, + path: options?.path?.trim() || file.name, + bytes: file.size, + } satisfies OpenworkInboxUploadResult; + }, + + listInbox: (workspaceId: string) => + requestJson(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/inbox`, { + token, + hostToken, + }), + + downloadInboxItem: (workspaceId: string, inboxId: string) => + requestBinary( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/inbox/${encodeURIComponent(inboxId)}`, + { token, hostToken, timeoutMs: timeouts.binary }, + ), + + readWorkspaceFile: (workspaceId: string, path: string) => + requestJson( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/files/content?path=${encodeURIComponent(path)}`, + { token, hostToken }, + ), + + writeWorkspaceFile: ( + workspaceId: string, + payload: { path: string; content: string; baseUpdatedAt?: number | null; force?: boolean }, + ) => + requestJson( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/files/content`, + { + token, + hostToken, + method: "POST", + body: payload, + }, + ), + + listArtifacts: (workspaceId: string) => + requestJson(baseUrl, `/workspace/${encodeURIComponent(workspaceId)}/artifacts`, { + token, + hostToken, + }), + + downloadArtifact: (workspaceId: string, artifactId: string) => + requestBinary( + baseUrl, + `/workspace/${encodeURIComponent(workspaceId)}/artifacts/${encodeURIComponent(artifactId)}`, + { token, hostToken, timeoutMs: timeouts.binary }, + ), + + // User-level env vars (host-auth only — desktop shell is the sole caller). + // See apps/server/src/env-file.ts and apps/app/pr/environment-variables.md. 
+ listUserEnvKeys: () => + requestJson<{ keys: string[] }>( + baseUrl, + "/env/keys", + { token, hostToken, timeoutMs: timeouts.config }, + ), + + listUserEnv: () => + requestJson<{ items: Array<{ key: string; value: string; updatedAt: number }> }>( + baseUrl, + "/env", + { token, hostToken, timeoutMs: timeouts.config }, + ), + + upsertUserEnv: (entries: Array<{ key: string; value: string }>) => + requestJson<{ ok: true; count: number }>(baseUrl, "/env", { + token, + hostToken, + method: "PUT", + body: { entries }, + timeoutMs: timeouts.config, + }), + + deleteUserEnv: (key: string) => + requestJson<{ ok: true }>(baseUrl, `/env/${encodeURIComponent(key)}`, { + token, + hostToken, + method: "DELETE", + timeoutMs: timeouts.config, + }), + }; +} + +export type OpenworkServerClient = ReturnType; diff --git a/apps/app/src/app/lib/perf-log.ts b/apps/app/src/app/lib/perf-log.ts new file mode 100644 index 0000000000..aa699fe353 --- /dev/null +++ b/apps/app/src/app/lib/perf-log.ts @@ -0,0 +1,144 @@ +import { recordDevLog } from "./dev-log"; + +export type PerfLogRecord = { + id: number; + at: string; + ts: number; + scope: string; + event: string; + payload?: Record; +}; + +type PerfRoot = typeof globalThis & { + __openworkPerfSeq?: number; + __openworkPerfLogs?: PerfLogRecord[]; + __openworkPerfConsoleAt?: Record; + __openworkPerfConsoleSuppressed?: Record; +}; + +const PERF_LOG_LIMIT = 500; +const HOT_EVENT_MIN_INTERVAL_MS = 750; +const HOT_EVENT_KEYS = new Set([ + "session.sse:flush", + "session.sse:arrival-gap", + "session.event:message.part.delta", + "session.event:message.part.updated", + "session.compaction:synthetic-continue", + "session.input:draft-flush", + "session.render:message-blocks", + "session.render:tool-summary", + "session.render:batch-commit", + "session.main-thread:lag", + "session.window:state", +]); + +export const perfNow = () => { + if (typeof performance !== "undefined" && typeof performance.now === "function") { + return performance.now(); + } + 
return Date.now(); +}; + +const round = (value: number) => Math.round(value * 100) / 100; + +export const recordPerfLog = ( + enabled: boolean, + scope: string, + event: string, + payload?: Record, +) => { + if (!enabled) return; + + const root = globalThis as PerfRoot; + const id = (root.__openworkPerfSeq ?? 0) + 1; + root.__openworkPerfSeq = id; + + const entry: PerfLogRecord = { + id, + at: new Date().toISOString(), + ts: Date.now(), + scope, + event, + payload, + }; + + const logs = root.__openworkPerfLogs ?? []; + logs.push(entry); + if (logs.length > PERF_LOG_LIMIT) { + logs.splice(0, logs.length - PERF_LOG_LIMIT); + } + root.__openworkPerfLogs = logs; + recordDevLog(enabled, { + level: "perf", + source: scope, + label: event, + payload, + }); + + try { + const key = `${scope}:${event}`; + const now = Date.now(); + const lastByKey = root.__openworkPerfConsoleAt ?? (root.__openworkPerfConsoleAt = {}); + const suppressedByKey = + root.__openworkPerfConsoleSuppressed ?? (root.__openworkPerfConsoleSuppressed = {}); + if (HOT_EVENT_KEYS.has(key)) { + const last = lastByKey[key] ?? 0; + if (now - last < HOT_EVENT_MIN_INTERVAL_MS) { + suppressedByKey[key] = (suppressedByKey[key] ?? 0) + 1; + return; + } + } + + lastByKey[key] = now; + const suppressed = suppressedByKey[key] ?? 0; + if (suppressed > 0) { + suppressedByKey[key] = 0; + } + + if (payload === undefined) { + if (suppressed > 0) { + console.log(`[OWPERF] ${scope}:${event}`, { suppressed }); + return; + } + console.log(`[OWPERF] ${scope}:${event}`); + return; + } + + if (suppressed > 0) { + console.log(`[OWPERF] ${scope}:${event}`, { ...payload, suppressed }); + return; + } + + console.log(`[OWPERF] ${scope}:${event}`, payload); + } catch { + // ignore + } +}; + +export const readPerfLogs = (limit = 120) => { + const root = globalThis as PerfRoot; + const logs = root.__openworkPerfLogs ?? 
[]; + if (limit <= 0) return []; + if (logs.length <= limit) return logs.slice(); + return logs.slice(logs.length - limit); +}; + +export const clearPerfLogs = () => { + const root = globalThis as PerfRoot; + root.__openworkPerfLogs = []; + root.__openworkPerfSeq = 0; +}; + +export const finishPerf = ( + enabled: boolean, + scope: string, + event: string, + startedAt: number, + payload?: Record, +) => { + if (!enabled) return; + recordPerfLog(enabled, scope, event, { + ...(payload ?? {}), + ms: round(perfNow() - startedAt), + }); +}; diff --git a/apps/app/src/app/lib/publisher.ts b/apps/app/src/app/lib/publisher.ts new file mode 100644 index 0000000000..5efc2b8818 --- /dev/null +++ b/apps/app/src/app/lib/publisher.ts @@ -0,0 +1,80 @@ +export type OpenworkPublisherBundleType = "skill" | "skills-set"; + +export type PublishBundleResult = { + url: string; +}; + +const ENV_OPENWORK_PUBLISHER_BASE_URL = String(import.meta.env.VITE_OPENWORK_PUBLISHER_BASE_URL ?? "").trim(); + +export const DEFAULT_OPENWORK_PUBLISHER_BASE_URL = + ENV_OPENWORK_PUBLISHER_BASE_URL || "https://share.openworklabs.com"; + +function normalizeBaseUrl(input: string): string { + const trimmed = String(input ?? "").trim(); + if (!trimmed) { + throw new Error("Publisher baseUrl is required"); + } + return trimmed.replace(/\/+$/, ""); +} + +async function readErrorMessage(response: Response): Promise { + try { + const text = await response.text(); + if (!text.trim()) return ""; + try { + const json = JSON.parse(text) as Record; + if (json && typeof json.message === "string" && json.message.trim()) { + return json.message.trim(); + } + } catch { + // ignore + } + return text.trim(); + } catch { + return ""; + } +} + +export async function publishOpenworkBundleJson(input: { + payload: unknown; + bundleType: OpenworkPublisherBundleType; + name?: string; + baseUrl?: string; + timeoutMs?: number; +}): Promise { + const baseUrl = normalizeBaseUrl(input.baseUrl ?? 
DEFAULT_OPENWORK_PUBLISHER_BASE_URL); + const timeoutMs = typeof input.timeoutMs === "number" && Number.isFinite(input.timeoutMs) ? input.timeoutMs : 15_000; + + const controller = new AbortController(); + const timer = window.setTimeout(() => controller.abort(), Math.max(1_000, timeoutMs)); + + try { + const response = await fetch(`${baseUrl}/v1/bundles`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Accept: "application/json", + "X-OpenWork-Bundle-Type": input.bundleType, + "X-OpenWork-Schema-Version": "v1", + ...(input.name?.trim() ? { "X-OpenWork-Name": input.name.trim() } : null), + }, + body: JSON.stringify(input.payload), + signal: controller.signal, + }); + + if (!response.ok) { + const details = await readErrorMessage(response); + const suffix = details ? `: ${details}` : ""; + throw new Error(`Publish failed (${response.status})${suffix}`); + } + + const json = (await response.json()) as Record; + const url = typeof json.url === "string" ? json.url.trim() : ""; + if (!url) { + throw new Error("Publisher response missing url"); + } + return { url }; + } finally { + window.clearTimeout(timer); + } +} diff --git a/apps/app/src/app/lib/release-channels.ts b/apps/app/src/app/lib/release-channels.ts new file mode 100644 index 0000000000..e423baacb1 --- /dev/null +++ b/apps/app/src/app/lib/release-channels.ts @@ -0,0 +1,65 @@ +/** + * Release-channel concept for OpenWork desktop builds. + * + * There are two channels users can opt into: + * + * - "stable": the default. The desktop app auto-updates from the rolling + * "latest" GitHub release attached to whichever semver tag most recently + * finished the Release App workflow. macOS, Linux, Windows. + * + * - "alpha": a macOS-only rolling channel that auto-updates on every merge + * to `dev`. Alpha builds are published to a fixed GitHub release tag + * (`alpha-macos-latest`) so the updater endpoint stays stable while the + * underlying artifact is replaced on every dev push. 
+ * + * Only the macOS (arm64) build is published to the alpha channel today. + * Linux and Windows always resolve to the stable channel. + */ + +import type { ReleaseChannel } from "../types"; + +/** Stable channel's Tauri updater manifest URL. */ +export const STABLE_UPDATER_ENDPOINT = + "https://github.com/different-ai/openwork/releases/latest/download/latest.json"; + +/** Alpha channel's Tauri updater manifest URL (macOS-only, rolling). */ +export const ALPHA_UPDATER_ENDPOINT = + "https://github.com/different-ai/openwork/releases/download/alpha-macos-latest/latest.json"; + +/** Rolling GitHub release tag that alpha macOS artifacts are published to. */ +export const ALPHA_MACOS_RELEASE_TAG = "alpha-macos-latest"; + +export type PlatformKind = "darwin" | "linux" | "windows" | "web" | "unknown"; + +/** + * Returns true when the given platform supports the alpha channel. + * + * Today alpha builds are produced only for macOS (arm64). The type-level + * conservatism here is deliberate: it's easier to widen later than to + * silently start advertising an alpha endpoint that serves no artifact. + */ +export function isAlphaChannelSupported(platform: PlatformKind): boolean { + return platform === "darwin"; +} + +/** + * Resolve the Tauri updater manifest URL for the requested channel. + * + * Falls back to the stable endpoint whenever alpha isn't supported on the + * current platform, so the caller never needs to special-case "alpha chosen + * on Linux" / "alpha chosen on Windows" etc. + */ +export function resolveUpdaterEndpoint( + channel: ReleaseChannel, + platform: PlatformKind = "darwin", +): string { + if (channel === "alpha" && isAlphaChannelSupported(platform)) { + return ALPHA_UPDATER_ENDPOINT; + } + return STABLE_UPDATER_ENDPOINT; +} + +/** Narrow an arbitrary string to a valid ReleaseChannel, defaulting to stable. */ +export function coerceReleaseChannel(value: unknown): ReleaseChannel { + return value === "alpha" ? 
"alpha" : "stable"; +} diff --git a/apps/app/src/app/lib/session-scope.ts b/apps/app/src/app/lib/session-scope.ts new file mode 100644 index 0000000000..a3e38ff6c7 --- /dev/null +++ b/apps/app/src/app/lib/session-scope.ts @@ -0,0 +1,101 @@ +import { normalizeDirectoryPath } from "../utils"; +import { normalizeDirectoryQueryPath } from "../utils"; + +/** + * Branded string for directory values sent over the wire to the OpenCode server. + * + * The server compares `session.directory === query.directory` with strict + * equality, so every call site that creates, lists, or deletes sessions must + * use the same canonical format. The brand makes it a *compile error* to pass + * a raw `string` where a `TransportDirectory` is expected — you must go + * through {@link toSessionTransportDirectory} first. + * + * On Windows this preserves native backslashes (`C:\Users\…`); on Unix it + * normalises to forward-slashed paths without a trailing separator. + */ +export type TransportDirectory = string & { + readonly __transportDirectory: unique symbol; +}; + +type WorkspaceType = "local" | "remote"; + +export function resolveScopedClientDirectory(input: { + directory?: string | null; + targetRoot?: string | null; + workspaceType?: WorkspaceType | null; +}): TransportDirectory { + const directory = toSessionTransportDirectory(input.directory); + if (directory) return directory; + + if (input.workspaceType === "remote") return "" as TransportDirectory; + + return toSessionTransportDirectory(input.targetRoot); +} + +/** + * Canonical formatter for directory values sent to the OpenCode server. + * + * Returns a {@link TransportDirectory} — the only format the server accepts for + * exact directory matching. All session create / list / delete calls must use + * this (or {@link resolveScopedClientDirectory}) instead of the local-only + * {@link normalizeDirectoryQueryPath}. 
 */
export function toSessionTransportDirectory(input?: string | null): TransportDirectory {
  const trimmed = (input ?? "").trim();
  // Empty / whitespace-only input maps to the empty transport directory.
  if (!trimmed) return "" as TransportDirectory;

  // Windows extended-length UNC prefix: `\\?\UNC\server\share\...`.
  // Drop the 7-char `\\?\UNC` prefix and prepend `\` so the result is a
  // plain UNC path (`\\server\share\...`).
  if (/^\\\\\?\\UNC\\/i.test(trimmed)) {
    return `\\${trimmed.slice(7)}` as TransportDirectory;
  }

  // Windows extended-length drive prefix: `\\?\C:\...` — strip the 4-char
  // `\\?\` prefix, leaving a plain drive-letter path (`C:\...`).
  if (/^\\\\\?\\[a-zA-Z]:[\\/]/.test(trimmed)) {
    return trimmed.slice(4) as TransportDirectory;
  }

  // Plain Windows paths (drive-letter or UNC) pass through untouched; the
  // server compares directories with strict equality in native form.
  if (/^(?:[a-zA-Z]:[\\/]|\\\\)/.test(trimmed)) {
    return trimmed as TransportDirectory;
  }

  // Unix-style paths: canonicalise via the shared query-path normaliser.
  return normalizeDirectoryQueryPath(trimmed) as TransportDirectory;
}

/**
 * Show a directory in all three representations (raw trimmed input,
 * wire/transport form, locally-normalised form). Empty strings become null
 * so callers can distinguish "unset" at a glance.
 */
export function describeDirectoryScope(input?: string | null) {
  const raw = input ?? "";
  const trimmed = raw.trim();
  const transport = toSessionTransportDirectory(trimmed);
  const normalized = normalizeDirectoryPath(trimmed);
  return {
    raw: trimmed || null,
    transport: (transport || null) as TransportDirectory | null,
    normalized: normalized || null,
  };
}

/** True when both roots normalise to the same non-empty directory path. */
export function scopedRootsMatch(a?: string | null, b?: string | null) {
  const left = normalizeDirectoryPath(a ?? "");
  const right = normalizeDirectoryPath(b ?? "");
  // Missing / empty roots never match anything (including each other).
  if (!left || !right) return false;
  return left === right;
}

/**
 * Whether a scoped session load's results should be applied: always when no
 * workspace root is set, otherwise only when the load was performed for the
 * currently-active workspace root (guards against stale async results).
 */
export function shouldApplyScopedSessionLoad(input: {
  loadedScopeRoot?: string | null;
  workspaceRoot?: string | null;
}) {
  const workspaceRoot = normalizeDirectoryPath(input.workspaceRoot ?? "");
  if (!workspaceRoot) return true;
  return scopedRootsMatch(input.loadedScopeRoot, workspaceRoot);
}

/**
 * Whether to redirect away after a scoped load found no matching session:
 * only when the load actually targeted the active workspace root.
 */
export function shouldRedirectMissingSessionAfterScopedLoad(input: {
  loadedScopeRoot?: string | null;
  workspaceRoot?: string | null;
  hasMatchingSession: boolean;
}) {
  // A matching session exists — nothing to redirect.
  if (input.hasMatchingSession) return false;

  const workspaceRoot = normalizeDirectoryPath(input.workspaceRoot ??
""); + if (!workspaceRoot) return false; + + return scopedRootsMatch(input.loadedScopeRoot, workspaceRoot); +} diff --git a/apps/app/src/app/lib/session-title.ts b/apps/app/src/app/lib/session-title.ts new file mode 100644 index 0000000000..aea996de92 --- /dev/null +++ b/apps/app/src/app/lib/session-title.ts @@ -0,0 +1,22 @@ +import { t } from "../../i18n"; + +/** Raw English string — used for prefix matching against stored titles. */ +export const DEFAULT_SESSION_TITLE = "New session"; + +const GENERATED_SESSION_TITLE_PREFIX = `${DEFAULT_SESSION_TITLE} - `; + +export function isGeneratedSessionTitle(title: string | null | undefined) { + const trimmed = title?.trim() ?? ""; + if (!trimmed.startsWith(GENERATED_SESSION_TITLE_PREFIX)) return false; + const suffix = trimmed.slice(GENERATED_SESSION_TITLE_PREFIX.length).trim(); + return Boolean(suffix) && Number.isFinite(Date.parse(suffix)); +} + +export function getDisplaySessionTitle( + title: string | null | undefined, + fallback?: string, +) { + const trimmed = title?.trim() ?? ""; + if (!trimmed || isGeneratedSessionTitle(trimmed)) return fallback ?? 
      t("session.default_title");
  return trimmed;
}
diff --git a/apps/app/src/app/lib/startup-boot.ts b/apps/app/src/app/lib/startup-boot.ts
new file mode 100644
index 0000000000..87cd9c5674
--- /dev/null
+++ b/apps/app/src/app/lib/startup-boot.ts
@@ -0,0 +1,56 @@
/** Phases the desktop app moves through while booting (plus "error"). */
export type BootPhase =
  | "nativeInit"
  | "workspaceBootstrap"
  | "engineProbe"
  | "engineStartOrConnect"
  | "sessionIndexReady"
  | "firstSessionReady"
  | "ready"
  | "error";

/** Which startup code path was taken, for tracing/diagnostics. */
export type StartupBranch =
  | "firstRunNoWorkspace"
  | "remoteWorkspace"
  | "localAttachExisting"
  | "localHostStart"
  | "serverPreference"
  | "localPreference"
  | "welcome"
  | "unknown";

/** One entry in the startup trace buffer. */
export type StartupTraceEvent = {
  at: number; // event timestamp — must be finite (see pushStartupTraceEvent)
  phase: BootPhase;
  event: string;
  detail?: Record<string, unknown>;
};

/**
 * Classify which startup branch applies. Checks are ordered by precedence:
 * first-run (zero workspaces) wins over everything, then remote workspace,
 * then an explicit "server" preference, then the no-workspace-selected
 * cases, and finally local attach-vs-host depending on whether the engine
 * already has a base URL.
 */
export function classifyStartupBranch(input: {
  workspaceCount: number;
  activeWorkspaceType: "local" | "remote" | null;
  startupPreference: "local" | "server" | null;
  engineHasBaseUrl: boolean;
  selectedWorkspacePath: string;
}): StartupBranch {
  if (input.workspaceCount === 0) return "firstRunNoWorkspace";
  if (input.activeWorkspaceType === "remote") return "remoteWorkspace";
  if (input.startupPreference === "server") return "serverPreference";
  if (!input.selectedWorkspacePath.trim()) {
    if (input.startupPreference === "local") return "localPreference";
    return "welcome";
  }
  // A workspace is selected: attach to a running engine if we already have
  // its base URL, otherwise start hosting one locally.
  return input.engineHasBaseUrl ? "localAttachExisting" : "localHostStart";
}

/**
 * Append a trace event, returning a new array capped at `maxEvents`.
 * Malformed events (non-finite timestamp, empty phase or event name) are
 * rejected and the original array is returned unchanged. Never mutates
 * `current` — callers get a fresh copy on every accepted event.
 */
export function pushStartupTraceEvent(
  current: StartupTraceEvent[],
  event: StartupTraceEvent,
  maxEvents = 100,
): StartupTraceEvent[] {
  if (!Number.isFinite(event.at) || !event.phase || !event.event) {
    return current;
  }
  // Drop the oldest entries to leave room for the new one at the cap.
  const base = current.length >= maxEvents ?
current.slice(current.length - maxEvents + 1) : current.slice(); + base.push(event); + return base; +} diff --git a/apps/app/src/app/lib/tauri.ts b/apps/app/src/app/lib/tauri.ts new file mode 100644 index 0000000000..9c4641a4bc --- /dev/null +++ b/apps/app/src/app/lib/tauri.ts @@ -0,0 +1 @@ +export * from "./desktop"; diff --git a/apps/app/src/app/lib/version-gate.ts b/apps/app/src/app/lib/version-gate.ts new file mode 100644 index 0000000000..64d8b92d9c --- /dev/null +++ b/apps/app/src/app/lib/version-gate.ts @@ -0,0 +1,222 @@ +// Version comparator + update gating helpers. +// +// Ported from dev's Solid system-state.ts (#1476 + #1512). Pure functions +// so they're reusable from any React feature site once the updater flow +// gets wired. + +import { createDenClient, readDenSettings, type DenDesktopConfig } from "./den"; + +type ParsedVersion = { + release: number[]; + prerelease: string[]; +}; + +function parseComparableVersion(value: string): ParsedVersion | null { + const normalized = value.trim().replace(/^v/i, ""); + if (!normalized) return null; + + const [versionCore] = normalized.split("+", 1); + if (!versionCore) return null; + + const [releasePart, prereleasePart = ""] = versionCore.split("-", 2); + const release = releasePart.split(".").map((segment) => Number(segment)); + if (!release.length || release.some((segment) => !Number.isInteger(segment) || segment < 0)) { + return null; + } + + const prerelease = prereleasePart + .split(".") + .map((segment) => segment.trim()) + .filter(Boolean); + + return { release, prerelease }; +} + +function comparePrereleaseIdentifiers(left: string[], right: string[]): number { + // semver-ish: absence of prerelease ranks higher than presence. 
+ if (!left.length && !right.length) return 0; + if (!left.length) return 1; + if (!right.length) return -1; + + const count = Math.max(left.length, right.length); + for (let index = 0; index < count; index += 1) { + const leftPart = left[index]; + const rightPart = right[index]; + if (leftPart === undefined) return -1; + if (rightPart === undefined) return 1; + + const leftNumeric = /^\d+$/.test(leftPart) ? Number(leftPart) : null; + const rightNumeric = /^\d+$/.test(rightPart) ? Number(rightPart) : null; + + if (leftNumeric !== null && rightNumeric !== null) { + if (leftNumeric !== rightNumeric) return leftNumeric < rightNumeric ? -1 : 1; + continue; + } + + if (leftNumeric !== null) return -1; + if (rightNumeric !== null) return 1; + + const comparison = leftPart.localeCompare(rightPart); + if (comparison !== 0) return comparison < 0 ? -1 : 1; + } + + return 0; +} + +function releasePart(value: string): number[] | null { + return parseComparableVersion(value)?.release ?? null; +} + +/** + * Compare two version strings. Returns -1 / 0 / 1 as usual, or null if + * either side fails to parse. Accepts an optional leading `v` and handles + * prerelease tags (e.g. `0.11.212-alpha.3`). + */ +export function compareVersions(left: string, right: string): number | null { + const parsedLeft = parseComparableVersion(left); + const parsedRight = parseComparableVersion(right); + if (!parsedLeft || !parsedRight) return null; + + const count = Math.max(parsedLeft.release.length, parsedRight.release.length); + for (let index = 0; index < count; index += 1) { + const leftPart = parsedLeft.release[index] ?? 0; + const rightPart = parsedRight.release[index] ?? 0; + if (leftPart !== rightPart) return leftPart < rightPart ? -1 : 1; + } + + return comparePrereleaseIdentifiers(parsedLeft.prerelease, parsedRight.prerelease); +} + +/** + * Apply the org-level `allowedDesktopVersions` filter (dev #1512). 
When + * the array is unset, everything is allowed; when it's set, the candidate + * update version must match one of the allowed versions exactly (by + * semver comparison, so leading `v` prefixes and trailing build metadata + * are treated equivalently). + */ +export function isUpdateAllowedByDesktopConfig( + updateVersion: string, + desktopConfig: DenDesktopConfig | null | undefined, +): boolean { + if (!Array.isArray(desktopConfig?.allowedDesktopVersions)) { + return true; + } + + return desktopConfig.allowedDesktopVersions.some( + (allowedVersion) => compareVersions(updateVersion, allowedVersion) === 0, + ); +} + +function maxAllowedDesktopVersion(desktopConfig: DenDesktopConfig | null | undefined): string | null { + if (!Array.isArray(desktopConfig?.allowedDesktopVersions)) { + return null; + } + + let maxVersion: string | null = null; + for (const version of desktopConfig.allowedDesktopVersions) { + if (parseComparableVersion(version) === null) continue; + if (maxVersion === null) { + maxVersion = version; + continue; + } + const comparison = compareVersions(version, maxVersion); + if (comparison !== null && comparison > 0) { + maxVersion = version; + } + } + return maxVersion; +} + +function effectiveMaxDesktopVersion( + denLatestAppVersion: string, + desktopConfig: DenDesktopConfig | null | undefined, +): string { + const orgMaxVersion = maxAllowedDesktopVersion(desktopConfig); + if (!orgMaxVersion) return denLatestAppVersion; + const comparison = compareVersions(orgMaxVersion, denLatestAppVersion); + return comparison !== null && comparison < 0 ? 
orgMaxVersion : denLatestAppVersion; +} + +function isWithinOnePatchAhead(updateVersion: string, maxVersion: string): boolean { + const directComparison = compareVersions(updateVersion, maxVersion); + if (directComparison !== null && directComparison <= 0) { + return true; + } + + const updateRelease = releasePart(updateVersion); + const maxRelease = releasePart(maxVersion); + if (!updateRelease || !maxRelease) return false; + + const updateMajor = updateRelease[0] ?? 0; + const updateMinor = updateRelease[1] ?? 0; + const updatePatch = updateRelease[2] ?? 0; + const maxMajor = maxRelease[0] ?? 0; + const maxMinor = maxRelease[1] ?? 0; + const maxPatch = maxRelease[2] ?? 0; + + return updateMajor === maxMajor && updateMinor === maxMinor && updatePatch <= maxPatch + 1; +} + +async function readDenLatestAppVersion(): Promise { + try { + const settings = readDenSettings(); + const token = settings.authToken?.trim() ?? ""; + const client = createDenClient({ + baseUrl: settings.baseUrl, + apiBaseUrl: settings.apiBaseUrl, + ...(token ? { token } : {}), + }); + const metadata = await client.getAppVersionMetadata(); + return metadata.latestAppVersion; + } catch { + return null; + } +} + +/** + * Ask Den for the currently-supported latest app version (dev #1476) and + * return true only when the candidate update version is the latest + * version or older. If Den is unreachable or returns an invalid payload, + * this returns `false` — the caller must treat that as "do not surface + * the update". + * + * No-op safe: callers can invoke this without any Den auth; the client + * will omit the token when none is persisted. 
+ */ +export async function isUpdateSupportedByDen(updateVersion: string): Promise { + const latestAppVersion = await readDenLatestAppVersion(); + if (!latestAppVersion) return false; + const comparison = compareVersions(updateVersion, latestAppVersion); + return comparison !== null && comparison <= 0; +} + +/** + * Alpha channel builds may run one patch ahead of the current Den/org maximum + * (e.g. Den allows 0.13.3, alpha 0.13.4-alpha.N is allowed). Larger jumps are + * still blocked so alpha cannot bypass staged rollout ceilings entirely. + */ +export async function isAlphaUpdateAllowed( + updateVersion: string, + desktopConfig: DenDesktopConfig | null | undefined, +): Promise { + const latestAppVersion = await readDenLatestAppVersion(); + if (!latestAppVersion) return false; + const effectiveMaxVersion = effectiveMaxDesktopVersion(latestAppVersion, desktopConfig); + return isWithinOnePatchAhead(updateVersion, effectiveMaxVersion); +} + +/** + * Combined gate: the update must be supported by Den (version metadata + * endpoint) AND allowed by the active org's `allowedDesktopVersions` if + * one is configured. Intended to be the single call site the React + * updater flow makes before surfacing an update as installable. 
+ */ +export async function isUpdateAllowed( + updateVersion: string, + desktopConfig: DenDesktopConfig | null | undefined, +): Promise { + if (!isUpdateAllowedByDesktopConfig(updateVersion, desktopConfig)) { + return false; + } + return isUpdateSupportedByDen(updateVersion); +} diff --git a/apps/app/src/app/lib/workspace-blueprints.ts b/apps/app/src/app/lib/workspace-blueprints.ts new file mode 100644 index 0000000000..671f293385 --- /dev/null +++ b/apps/app/src/app/lib/workspace-blueprints.ts @@ -0,0 +1,248 @@ +import type { + WorkspaceBlueprint, + WorkspaceBlueprintMaterializedSession, + WorkspaceBlueprintSessionMessage, + WorkspaceBlueprintSessionTemplate, + WorkspaceBlueprintStarter, + WorkspaceOpenworkConfig, +} from "../types"; +import { parseTemplateFrontmatter } from "../utils"; +import { t } from "../../i18n"; + +import browserSetupTemplate from "../data/commands/browser-setup.md?raw"; + +const BROWSER_AUTOMATION_QUICKSTART_PROMPT = (() => { + const parsed = parseTemplateFrontmatter(browserSetupTemplate); + return (parsed?.body ?? 
browserSetupTemplate).trim(); +})(); + + +const defaultWelcomeBlueprintMessages = (): WorkspaceBlueprintSessionMessage[] => [ + { + role: "assistant", + text: t("blueprint.welcome_message"), + }, +]; + +export function defaultBlueprintSessionsForPreset(_preset: string): WorkspaceBlueprintSessionTemplate[] { + return [ + { + id: "welcome-to-openwork", + title: t("blueprint.welcome_title"), + messages: defaultWelcomeBlueprintMessages(), + openOnFirstLoad: true, + }, + { + id: "csv-playbook", + title: t("blueprint.csv_session_title"), + messages: [ + { + role: "assistant", + text: t("blueprint.csv_session_assistant"), + }, + { + role: "user", + text: t("blueprint.csv_session_user"), + }, + ], + openOnFirstLoad: false, + }, + ]; +} + +function normalizeSessionMessage(value: unknown): WorkspaceBlueprintSessionMessage | null { + if (!value || typeof value !== "object" || Array.isArray(value)) return null; + const record = value as Record; + const text = typeof record.text === "string" ? record.text.trim() : ""; + if (!text) return null; + const role = String(record.role ?? "assistant").trim().toLowerCase() === "user" ? "user" : "assistant"; + return { role, text }; +} + +function normalizeSessionTemplate(value: unknown, index: number): WorkspaceBlueprintSessionTemplate | null { + if (!value || typeof value !== "object" || Array.isArray(value)) return null; + const record = value as Record; + const title = typeof record.title === "string" ? record.title.trim() : ""; + const id = typeof record.id === "string" && record.id.trim() ? record.id.trim() : `template-session-${index + 1}`; + const messages = Array.isArray(record.messages) + ? 
record.messages.map(normalizeSessionMessage).filter((item): item is WorkspaceBlueprintSessionMessage => Boolean(item)) + : []; + if (!title && messages.length === 0) return null; + return { + id, + title: title || null, + messages, + openOnFirstLoad: record.openOnFirstLoad === true, + }; +} + +function normalizeMaterializedSession(value: unknown): WorkspaceBlueprintMaterializedSession | null { + if (!value || typeof value !== "object" || Array.isArray(value)) return null; + const record = value as Record; + const sessionId = typeof record.sessionId === "string" ? record.sessionId.trim() : ""; + const templateId = typeof record.templateId === "string" ? record.templateId.trim() : ""; + if (!sessionId || !templateId) return null; + return { sessionId, templateId }; +} + +function normalizeBlueprint(value: unknown): WorkspaceBlueprint | null { + if (!value || typeof value !== "object" || Array.isArray(value)) return null; + const candidate = value as WorkspaceBlueprint & Record; + const sessions = Array.isArray(candidate.sessions) + ? candidate.sessions + .map((session, index) => normalizeSessionTemplate(session, index)) + .filter((item): item is WorkspaceBlueprintSessionTemplate => Boolean(item)) + : null; + const materializedSessions = Array.isArray(candidate.materialized?.sessions?.items) + ? candidate.materialized?.sessions?.items + .map(normalizeMaterializedSession) + .filter((item): item is WorkspaceBlueprintMaterializedSession => Boolean(item)) + : null; + + return { + emptyState: candidate.emptyState ?? null, + sessions, + materialized: candidate.materialized + ? { + sessions: candidate.materialized.sessions + ? { + hydratedAt: + typeof candidate.materialized.sessions.hydratedAt === "number" + ? 
candidate.materialized.sessions.hydratedAt + : null, + items: materializedSessions, + } + : null, + } + : null, + }; +} + +export function defaultBlueprintStartersForPreset(preset: string): WorkspaceBlueprintStarter[] { + switch (preset.trim().toLowerCase()) { + case "automation": + return [ + { + id: "automation-command", + kind: "prompt", + title: t("blueprint.starter_command_title"), + description: t("blueprint.starter_command_desc"), + prompt: t("blueprint.starter_command_prompt"), + }, + { + id: "automation-blueprint", + kind: "session", + title: t("blueprint.starter_blueprint_title"), + description: t("blueprint.starter_blueprint_desc"), + prompt: t("blueprint.starter_blueprint_prompt"), + }, + ]; + case "minimal": + return [ + { + id: "minimal-explore", + kind: "prompt", + title: t("blueprint.starter_explore_title"), + description: t("blueprint.starter_explore_desc"), + prompt: t("blueprint.starter_explore_prompt"), + }, + ]; + default: + return [ + { + id: "csv-help", + kind: "prompt", + title: t("blueprint.starter_csv_title"), + description: t("blueprint.starter_csv_desc"), + prompt: t("blueprint.starter_csv_prompt"), + }, + { + id: "starter-connect-openai", + kind: "action", + title: t("blueprint.starter_connect_openai_title"), + description: t("blueprint.starter_connect_openai_desc"), + action: "connect-openai", + }, + { + id: "browser-automation", + kind: "session", + title: t("blueprint.starter_chrome_title"), + description: t("blueprint.starter_chrome_desc"), + prompt: t("blueprint.starter_chrome_prompt"), + }, + ]; + } +} + +export function defaultBlueprintCopyForPreset(preset: string) { + switch (preset.trim().toLowerCase()) { + case "automation": + return { + title: t("blueprint.automation_title"), + body: t("blueprint.automation_body"), + }; + case "minimal": + return { + title: t("blueprint.minimal_title"), + body: t("blueprint.minimal_body"), + }; + default: + return { + title: t("blueprint.empty_title"), + body: t("blueprint.empty_body"), + }; 
+ } +} + +export function buildDefaultWorkspaceBlueprint(preset: string): WorkspaceBlueprint { + const copy = defaultBlueprintCopyForPreset(preset); + return { + emptyState: { + title: copy.title, + body: copy.body, + starters: defaultBlueprintStartersForPreset(preset), + }, + sessions: defaultBlueprintSessionsForPreset(preset), + }; +} + +export function blueprintSessions(config: WorkspaceOpenworkConfig | null | undefined): WorkspaceBlueprintSessionTemplate[] { + return Array.isArray(config?.blueprint?.sessions) + ? config!.blueprint!.sessions!.filter((item): item is WorkspaceBlueprintSessionTemplate => Boolean(item)) + : []; +} + +export function blueprintMaterializedSessions(config: WorkspaceOpenworkConfig | null | undefined): WorkspaceBlueprintMaterializedSession[] { + return Array.isArray(config?.blueprint?.materialized?.sessions?.items) + ? config!.blueprint!.materialized!.sessions!.items!.filter((item): item is WorkspaceBlueprintMaterializedSession => Boolean(item)) + : []; +} + +export function normalizeWorkspaceOpenworkConfig( + value: unknown, + preset?: string | null, +): WorkspaceOpenworkConfig { + const candidate = + value && typeof value === "object" + ? (value as Partial) + : {}; + + const normalizedPreset = + candidate.workspace?.preset?.trim() || preset?.trim() || null; + + return { + version: typeof candidate.version === "number" ? candidate.version : 1, + workspace: + candidate.workspace || normalizedPreset + ? { + ...(candidate.workspace ?? {}), + preset: normalizedPreset, + } + : null, + authorizedRoots: Array.isArray(candidate.authorizedRoots) + ? candidate.authorizedRoots.filter((item): item is string => typeof item === "string") + : [], + blueprint: normalizeBlueprint(candidate.blueprint), + reload: candidate.reload ?? 
null, + }; +} diff --git a/apps/app/src/app/mcp.ts b/apps/app/src/app/mcp.ts new file mode 100644 index 0000000000..b96917d296 --- /dev/null +++ b/apps/app/src/app/mcp.ts @@ -0,0 +1,113 @@ +import { applyEdits, modify, parse, printParseErrorCode } from "jsonc-parser"; +import type { McpServerConfig, McpServerEntry } from "./types"; +import { readOpencodeConfig, writeOpencodeConfig } from "./lib/desktop"; +import { CHROME_DEVTOOLS_MCP_COMMAND, CHROME_DEVTOOLS_MCP_ID } from "./constants"; + +type McpConfigValue = Record | null | undefined; + +export const CHROME_DEVTOOLS_AUTO_CONNECT_ARG = "--autoConnect"; + +type McpIdentity = { + id?: string; + name: string; +}; + +export function normalizeMcpSlug(name: string): string { + return name.toLowerCase().replace(/[^a-z0-9]+/g, "-"); +} + +export function getMcpIdentityKey(entry: McpIdentity): string { + return entry.id ?? normalizeMcpSlug(entry.name); +} + +export function isChromeDevtoolsMcp(entry: McpIdentity | string | null | undefined): boolean { + if (!entry) return false; + const key = typeof entry === "string" ? entry : getMcpIdentityKey(entry); + return key === CHROME_DEVTOOLS_MCP_ID || normalizeMcpSlug(typeof entry === "string" ? entry : entry.name) === "control-chrome"; +} + +export function usesChromeDevtoolsAutoConnect(command?: string[]): boolean { + return Array.isArray(command) && command.includes(CHROME_DEVTOOLS_AUTO_CONNECT_ARG); +} + +export function buildChromeDevtoolsCommand(command: string[] | undefined, useExistingProfile: boolean): string[] { + const base = Array.isArray(command) && command.length + ? command.filter((part) => part !== CHROME_DEVTOOLS_AUTO_CONNECT_ARG) + : [...CHROME_DEVTOOLS_MCP_COMMAND]; + return useExistingProfile ? 
[...base, CHROME_DEVTOOLS_AUTO_CONNECT_ARG] : base; +} + +export function validateMcpServerName(name: string): string { + const trimmed = name.trim(); + if (!trimmed) { + throw new Error("server_name is required"); + } + if (trimmed.startsWith("-")) { + throw new Error("server_name must not start with '-'"); + } + if (!/^[A-Za-z0-9_-]+$/.test(trimmed)) { + throw new Error("server_name must be alphanumeric with '-' or '_'"); + } + return trimmed; +} + +export async function removeMcpFromConfig( + projectDir: string, + name: string, +): Promise { + const configFile = await readOpencodeConfig("project", projectDir); + const raw = configFile.exists && configFile.content?.trim() + ? configFile.content + : "{}\n"; + + const parseErrors: Array<{ error: number; offset: number; length: number }> = []; + const existingConfig = parse(raw, parseErrors, { allowTrailingComma: true }) as Record | undefined; + if (parseErrors.length > 0) { + const details = parseErrors + .map((entry) => printParseErrorCode(entry.error)) + .join(", "); + throw new Error(`Failed to parse opencode config: ${details}`); + } + + const mcpSection = existingConfig?.["mcp"] as Record | undefined; + if (!mcpSection || !(name in mcpSection)) return; + + const formattingOptions = { insertSpaces: true, tabSize: 2, eol: "\n" }; + const updated = applyEdits(raw, modify(raw, ["mcp", name], undefined, { formattingOptions })); + const writeResult = await writeOpencodeConfig( + "project", + projectDir, + updated.endsWith("\n") ? 
updated : `${updated}\n`, + ); + if (!writeResult.ok) { + throw new Error(writeResult.stderr || writeResult.stdout || "Failed to write opencode.json"); + } +} + +export function parseMcpServersFromContent(content: string): McpServerEntry[] { + if (!content.trim()) return []; + + try { + const parsed = parse(content) as Record | undefined; + const mcp = parsed?.mcp as McpConfigValue; + + if (!mcp || typeof mcp !== "object") { + return []; + } + + return Object.entries(mcp).flatMap(([name, value]) => { + if (!value || typeof value !== "object") { + return []; + } + + const config = value as McpServerConfig; + if (config.type !== "remote" && config.type !== "local") { + return []; + } + + return [{ name, config, source: "config.project" as const }]; + }); + } catch { + return []; + } +} diff --git a/apps/app/src/app/theme.ts b/apps/app/src/app/theme.ts new file mode 100644 index 0000000000..efd5e24f45 --- /dev/null +++ b/apps/app/src/app/theme.ts @@ -0,0 +1,62 @@ +export type ThemeMode = "light" | "dark" | "system"; + +const THEME_PREF_KEY = "openwork.themePref"; + +const mediaQuery = "(prefers-color-scheme: dark)"; + +const getMediaQueryList = () => + typeof window === "undefined" ? null : window.matchMedia(mediaQuery); + +const readStoredMode = (): ThemeMode => { + if (typeof window === "undefined") return "system"; + try { + const stored = window.localStorage.getItem(THEME_PREF_KEY); + if (stored === "light" || stored === "dark" || stored === "system") { + return stored; + } + } catch { + // ignore + } + return "system"; +}; + +const resolveMode = (mode: ThemeMode) => { + if (mode !== "system") return mode; + return getMediaQueryList()?.matches ? 
"dark" : "light"; +}; + +const applyTheme = (mode: ThemeMode) => { + if (typeof document === "undefined") return; + const resolved = resolveMode(mode); + document.documentElement.dataset.theme = resolved; + document.documentElement.style.colorScheme = resolved; +}; + +export const bootstrapTheme = () => { + const mode = readStoredMode(); + applyTheme(mode); +}; + +export const getInitialThemeMode = () => readStoredMode(); + +export const persistThemeMode = (mode: ThemeMode) => { + if (typeof window === "undefined") return; + try { + window.localStorage.setItem(THEME_PREF_KEY, mode); + } catch { + // ignore + } +}; + +export const subscribeToSystemTheme = (onChange: (isDark: boolean) => void) => { + const list = getMediaQueryList(); + if (!list) return () => undefined; + + const handler = (event: MediaQueryListEvent) => onChange(event.matches); + list.addEventListener("change", handler); + return () => list.removeEventListener("change", handler); +}; + +export const applyThemeMode = (mode: ThemeMode) => { + applyTheme(mode); +}; diff --git a/apps/app/src/app/types.ts b/apps/app/src/app/types.ts new file mode 100644 index 0000000000..9b215e0dae --- /dev/null +++ b/apps/app/src/app/types.ts @@ -0,0 +1,445 @@ +import type { + Message, + Part, + PermissionRequest as ApiPermissionRequest, + QuestionRequest, + ProviderListResponse, + Session, +} from "@opencode-ai/sdk/v2/client"; +import type { createClient } from "./lib/opencode"; +import type { OpencodeConfigFile, WorkspaceInfo } from "./lib/desktop"; + +export type Client = ReturnType; + +export type ProviderListItem = ProviderListResponse["all"][number]; + +export type SidebarSessionItem = { + id: string; + title: string; + slug?: string | null; + parentID?: string | null; + time?: { + updated?: number | null; + created?: number | null; + }; + directory?: string | null; +}; + +export type WorkspaceSessionGroup = { + workspace: WorkspaceInfo; + sessions: SidebarSessionItem[]; + status: "idle" | "loading" | "ready" | 
"error"; + error?: string | null; +}; + +export type PlaceholderMessageInfo = { + id: string; + sessionID: string; + role: "assistant" | "user"; + time: { + created: number; + completed?: number; + }; + parentID: string; + modelID: string; + providerID: string; + mode: string; + agent: string; + path: { + cwd: string; + root: string; + }; + cost: number; + tokens: { + input: number; + output: number; + reasoning: number; + cache: { + read: number; + write: number; + }; + }; +}; + +export type PlaceholderAssistantMessage = PlaceholderMessageInfo & { + role: "assistant"; +}; + +export type MessageInfo = Message | PlaceholderMessageInfo; + +export type MessageWithParts = { + info: MessageInfo; + parts: Part[]; +}; + +export type SessionErrorTurn = { + id: string; + text: string; + afterMessageID: string | null; + time: number; +}; + +export const SYNTHETIC_SESSION_ERROR_MESSAGE_PREFIX = "session-error:"; + +export type StepGroupMode = "exploration" | "standalone"; + +export type MessageGroup = + | { kind: "text"; part: Part; segment: "intent" | "result" } + | { kind: "steps"; id: string; parts: Part[]; segment: "execution"; mode: StepGroupMode }; + +export type PromptMode = "prompt" | "shell"; + +export type ComposerPart = + | { type: "text"; text: string } + | { type: "agent"; name: string } + | { type: "file"; path: string; label?: string } + | { type: "paste"; id: string; label: string; text: string; lines: number }; + +export type ComposerAttachment = { + id: string; + name: string; + mimeType: string; + size: number; + kind: "image" | "file"; + file: File; + previewUrl?: string; +}; + +export type SlashCommandOption = { + id: string; + name: string; + description?: string; + source?: "command" | "mcp" | "skill"; +}; + +export type ComposerDraft = { + mode: PromptMode; + parts: ComposerPart[]; + attachments: ComposerAttachment[]; + /** Editor-visible text (may include collapsed paste placeholders). */ + text: string; + /** + * Resolved text to send to the model. 
+ * When a paste is collapsed into a placeholder (e.g. "[pasted text 1]"), + * this includes the full pasted text instead. + */ + resolvedText?: string; + /** When set, draft is a slash command invocation */ + command?: { name: string; arguments: string } | undefined; +}; + +export type ArtifactItem = { + id: string; + name: string; + path?: string; + kind: "file" | "text"; + size?: string; + messageId?: string; +}; + +export type OpencodeEvent = { + type: string; + properties?: unknown; +}; + +export type SessionCompactionState = { + running: boolean; + startedAt: number | null; + finishedAt: number | null; + mode: "auto" | "manual" | null; + messageID: string | null; +}; + +export type View = "settings" | "session" | "signin"; + +export type StartupPreference = "local" | "server"; + +/** + * Release channel the desktop app is subscribed to. + * + * - "stable": default. Auto-updates from the rolling stable GitHub release. + * - "alpha": macOS-only. Auto-updates from the rolling alpha release that + * every merge to `dev` publishes to. + * + * See `apps/app/src/app/lib/release-channels.ts` for URL resolution. 
+ */ +export type ReleaseChannel = "stable" | "alpha"; + +export type EngineRuntime = "direct"; + +export type OnboardingStep = "welcome" | "local" | "server" | "connecting"; + +export type SettingsTab = + | "general" + | "den" + | "skills" + | "extensions" + | "environment" + | "advanced" + | "appearance" + | "updates" + | "recovery" + | "debug"; + +export type WorkspacePreset = "starter" | "automation" | "minimal"; + +export type WorkspaceConnectionStatus = "idle" | "connecting" | "connected" | "error"; + +export type WorkspaceConnectionState = { + status: WorkspaceConnectionStatus; + message?: string | null; + checkedAt?: number | null; +}; + +export type ResetOpenworkMode = "onboarding" | "all"; + +export type WorkspaceBlueprintStarterKind = "prompt" | "session" | "action"; + +export type WorkspaceBlueprintStarterAction = "connect-openai"; + +export type WorkspaceBlueprintStarter = { + id?: string | null; + kind?: WorkspaceBlueprintStarterKind | null; + title?: string | null; + description?: string | null; + prompt?: string | null; + action?: WorkspaceBlueprintStarterAction | null; +}; + +export type WorkspaceBlueprintSessionMessageRole = "assistant" | "user"; + +export type WorkspaceBlueprintSessionMessage = { + role?: WorkspaceBlueprintSessionMessageRole | null; + text?: string | null; +}; + +export type WorkspaceBlueprintSessionTemplate = { + id?: string | null; + title?: string | null; + messages?: WorkspaceBlueprintSessionMessage[] | null; + openOnFirstLoad?: boolean | null; +}; + +export type WorkspaceBlueprintMaterializedSession = { + templateId?: string | null; + sessionId?: string | null; +}; + +export type WorkspaceBlueprintMaterializedSessions = { + hydratedAt?: number | null; + items?: WorkspaceBlueprintMaterializedSession[] | null; +}; + +export type WorkspaceBlueprintEmptyState = { + title?: string | null; + body?: string | null; + starters?: WorkspaceBlueprintStarter[] | null; +}; + +export type WorkspaceBlueprint = { + emptyState?: 
WorkspaceBlueprintEmptyState | null; + sessions?: WorkspaceBlueprintSessionTemplate[] | null; + materialized?: { + sessions?: WorkspaceBlueprintMaterializedSessions | null; + } | null; +}; + +export type WorkspaceOpenworkConfig = { + version: number; + workspace?: { + name?: string | null; + createdAt?: number | null; + preset?: string | null; + } | null; + authorizedRoots: string[]; + blueprint?: WorkspaceBlueprint | null; + reload?: { + auto?: boolean; + resume?: boolean; + } | null; +}; + +export type SkillCard = { + name: string; + path: string; + description?: string; + trigger?: string; +}; + +export type HubSkillRepo = { + owner: string; + repo: string; + ref: string; +}; + +export type HubSkillCard = { + name: string; + description?: string; + trigger?: string; + source: HubSkillRepo & { + path: string; + }; +}; + +/** OpenWork Cloud (Den) org skill surfaced in the Skills catalog (team hub + shared). */ +export type DenOrgSkillCard = { + id: string; + title: string; + description: string | null; + skillText: string; + hubName: string | null; + shared: "org" | "public" | null; + updatedAt: string | null; +}; + +export type PluginInstallStep = { + title: string; + description: string; + command?: string; + url?: string; + path?: string; + note?: string; +}; + +export type SuggestedPlugin = { + name: string; + packageName: string; + description: string; + tags: string[]; + aliases?: string[]; + installMode?: "simple" | "guided"; + steps?: PluginInstallStep[]; +}; + +export type PluginScope = "project" | "global"; + +export type McpServerSource = "config.project" | "config.global" | "config.remote"; + +export type McpServerConfig = { + type: "remote" | "local"; + url?: string; + command?: string[]; + enabled?: boolean; + headers?: Record; + environment?: Record; + oauth?: Record | false; + timeout?: number; +}; + +export type McpServerEntry = { + name: string; + config: McpServerConfig; + source?: McpServerSource; +}; + +export type McpStatus = + | { status: 
"connected" } + | { status: "disabled" } + | { status: "failed"; error: string } + | { status: "needs_auth" } + | { status: "needs_client_registration"; error: string }; + +export type McpStatusMap = Record; + +export type ReloadReason = "plugins" | "skills" | "mcp" | "config" | "agents" | "commands"; + +export type OpencodeConnectStatus = { + at: number; + baseUrl: string; + directory?: string | null; + reason?: string | null; + status: "connecting" | "connected" | "error"; + error?: string | null; + metrics?: { + healthyMs?: number; + loadSessionsMs?: number; + pendingPermissionsMs?: number; + providersMs?: number; + totalMs?: number; + }; +}; + +export type ReloadTrigger = { + type: "skill" | "plugin" | "config" | "mcp" | "agent" | "command"; + name?: string; + action?: "added" | "removed" | "updated"; + path?: string; +}; + +export type PendingPermission = ApiPermissionRequest & { + receivedAt: number; +}; + +export type PendingQuestion = QuestionRequest & { + receivedAt: number; +}; + +export type TodoItem = { + id: string; + content: string; + status: string; + priority: string; +}; + +export type ModelRef = { + providerID: string; + modelID: string; +}; + +export type ModelBehaviorOption = { + value: string | null; + label: string; + description: string; +}; + +export type ModelOption = { + providerID: string; + modelID: string; + title: string; + description?: string; + footer?: string; + behaviorTitle: string; + behaviorLabel: string; + behaviorDescription: string; + behaviorValue: string | null; + behaviorOptions?: ModelBehaviorOption[]; + disabled?: boolean; + isFree: boolean; + isConnected: boolean; + isRecommended?: boolean; +}; + +export type SelectedSessionSnapshot = { + session: Session | null; + status: string; + modelLabel: string; +}; + +export type WorkspaceState = { + active: WorkspaceInfo | null; + path: string; + root: string; +}; + +export type PluginState = { + scope: PluginScope; + config: OpencodeConfigFile | null; + list: string[]; +}; + 
+export type WorkspaceDisplay = WorkspaceInfo & { + name: string; +}; + +export type UpdateHandle = { + available: boolean; + currentVersion: string; + version: string; + date?: string; + body?: string; + rawJson: Record; + close: () => Promise; + download: (onEvent?: (event: any) => void) => Promise; + install: () => Promise; + downloadAndInstall: (onEvent?: (event: any) => void) => Promise; +}; diff --git a/apps/app/src/app/utils/index.ts b/apps/app/src/app/utils/index.ts new file mode 100644 index 0000000000..51307f5b49 --- /dev/null +++ b/apps/app/src/app/utils/index.ts @@ -0,0 +1,1070 @@ +import type { Part, Session } from "@opencode-ai/sdk/v2/client"; +import { t } from "../../i18n"; +import type { + ArtifactItem, + MessageGroup, + MessageInfo, + MessageWithParts, + ModelRef, + OpencodeEvent, + PlaceholderAssistantMessage, + ProviderListItem, +} from "../types"; +import type { WorkspaceInfo } from "../lib/desktop"; + +export function formatModelRef(model: ModelRef) { + return `${model.providerID}/${model.modelID}`; +} + +export function parseModelRef(raw: string | null): ModelRef | null { + if (!raw) return null; + const trimmed = raw.trim(); + if (!trimmed) return null; + const [providerID, ...rest] = trimmed.split("/"); + if (!providerID || rest.length === 0) return null; + return { providerID, modelID: rest.join("/") }; +} + +export function modelEquals(a: ModelRef, b: ModelRef) { + return a.providerID === b.providerID && a.modelID === b.modelID; +} + +const FRIENDLY_PROVIDER_LABELS: Record = { + opencode: "OpenCode", + openai: "OpenAI", + anthropic: "Anthropic", + google: "Google", + openrouter: "OpenRouter", +}; + +const humanizeModelLabel = (value: string) => { + const normalized = value.trim().toLowerCase(); + if (normalized && FRIENDLY_PROVIDER_LABELS[normalized]) { + return FRIENDLY_PROVIDER_LABELS[normalized]; + } + + const cleaned = value.replace(/[_-]+/g, " ").replace(/\s+/g, " ").trim(); + if (!cleaned) return value; + + return cleaned + .split(" 
") + .filter(Boolean) + .map((word) => { + if (/\d/.test(word) || word.length <= 3) { + return word.toUpperCase(); + } + const lower = word.toLowerCase(); + return lower.charAt(0).toUpperCase() + lower.slice(1); + }) + .join(" "); +}; + +export function formatModelLabel(model: ModelRef, providers: ProviderListItem[] = []) { + const provider = providers.find((p) => p.id === model.providerID); + const modelInfo = provider?.models?.[model.modelID]; + + const providerLabel = provider?.name ?? humanizeModelLabel(model.providerID); + const modelLabel = modelInfo?.name ?? humanizeModelLabel(model.modelID); + + return `${providerLabel} · ${modelLabel}`; +} + +export function isTauriRuntime() { + return typeof window !== "undefined" && (window as any).__TAURI_INTERNALS__ != null; +} + +export function isElectronRuntime() { + return typeof window !== "undefined" && (window as Window).__OPENWORK_ELECTRON__ != null; +} + +export function isDesktopRuntime() { + return isTauriRuntime() || isElectronRuntime(); +} + +export function isWindowsPlatform() { + if (typeof navigator === "undefined") return false; + + const ua = typeof navigator.userAgent === "string" ? navigator.userAgent : ""; + const platform = + typeof (navigator as any).userAgentData?.platform === "string" + ? (navigator as any).userAgentData.platform + : typeof navigator.platform === "string" + ? navigator.platform + : ""; + + return /windows/i.test(platform) || /windows/i.test(ua); +} + +export function isMacPlatform() { + if (typeof navigator === "undefined") return false; + + const ua = typeof navigator.userAgent === "string" ? navigator.userAgent : ""; + const platform = + typeof (navigator as any).userAgentData?.platform === "string" + ? (navigator as any).userAgentData.platform + : typeof navigator.platform === "string" + ? 
navigator.platform + : ""; + + return /mac/i.test(platform) || /macintosh|mac os x/i.test(ua); +} + +const STARTUP_PREF_KEY = "openwork.startupPref"; +const LEGACY_PREF_KEY = "openwork.modePref"; +const LEGACY_PREF_KEY_ALT = "openwork_mode_pref"; + +export function readStartupPreference(): "local" | "server" | null { + if (typeof window === "undefined") return null; + + try { + const pref = + window.localStorage.getItem(STARTUP_PREF_KEY) ?? + window.localStorage.getItem(LEGACY_PREF_KEY) ?? + window.localStorage.getItem(LEGACY_PREF_KEY_ALT); + + if (pref === "local" || pref === "server") return pref; + if (pref === "host") return "local"; + if (pref === "client") return "server"; + } catch { + // ignore + } + + return null; +} + +export function writeStartupPreference(nextPref: "local" | "server") { + if (typeof window === "undefined") return; + + try { + window.localStorage.setItem(STARTUP_PREF_KEY, nextPref); + window.localStorage.removeItem(LEGACY_PREF_KEY); + window.localStorage.removeItem(LEGACY_PREF_KEY_ALT); + } catch { + // ignore + } +} + +export function clearStartupPreference() { + if (typeof window === "undefined") return; + + try { + window.localStorage.removeItem(STARTUP_PREF_KEY); + window.localStorage.removeItem(LEGACY_PREF_KEY); + window.localStorage.removeItem(LEGACY_PREF_KEY_ALT); + } catch { + // ignore + } +} + +export function safeStringify(value: unknown) { + const seen = new WeakSet(); + + try { + return JSON.stringify( + value, + (key, val) => { + if (val && typeof val === "object") { + if (seen.has(val as object)) { + return ""; + } + seen.add(val as object); + } + + const lowerKey = key.toLowerCase(); + if ( + lowerKey === "reasoningencryptedcontent" || + lowerKey.includes("api_key") || + lowerKey.includes("apikey") || + lowerKey.includes("access_token") || + lowerKey.includes("refresh_token") || + lowerKey.includes("token") || + lowerKey.includes("authorization") || + lowerKey.includes("cookie") || + lowerKey.includes("secret") + ) { + 
return "[redacted]"; + } + + return val; + }, + 2, + ); + } catch { + return ""; + } +} + +export function formatBytes(bytes: number) { + if (!Number.isFinite(bytes) || bytes <= 0) return "0 B"; + const units = ["B", "KB", "MB", "GB"] as const; + const idx = Math.min(units.length - 1, Math.floor(Math.log(bytes) / Math.log(1024))); + const value = bytes / Math.pow(1024, idx); + const rounded = idx === 0 ? Math.round(value) : Math.round(value * 10) / 10; + return `${rounded} ${units[idx]}`; +} + +/** + * Convert a directory path to a forward-slash normalised form for **local** + * comparison only (e.g. case-insensitive matching via {@link normalizeDirectoryPath}). + * + * **Do NOT use this when building a directory value that will be sent to the + * OpenCode server** (session.list, session.create, mcp.status, etc.). The + * server compares directories with strict equality and on Windows it stores + * native backslash paths. Use + * {@link import("../lib/session-scope").toSessionTransportDirectory toSessionTransportDirectory} + * instead — it returns a branded {@link import("../lib/session-scope").TransportDirectory TransportDirectory} + * that the compiler can enforce. + */ +export function normalizeDirectoryQueryPath(input?: string | null) { + const trimmed = (input ?? "").trim(); + if (!trimmed) return ""; + const withoutVerbatim = /^\\\\\?\\UNC\\/i.test(trimmed) + ? `\\${trimmed.slice(7)}` + : /^\\\\\?\\[a-zA-Z]:[\\/]/.test(trimmed) + ? trimmed.slice(4) + : trimmed; + const unified = withoutVerbatim.replace(/\\/g, "/"); + const withoutTrailing = unified.replace(/\/+$/, ""); + return withoutTrailing || "/"; +} + +export function normalizeDirectoryPath(input?: string | null) { + const normalized = normalizeDirectoryQueryPath(input); + if (!normalized) return ""; + return isWindowsPlatform() || isMacPlatform() ? 
normalized.toLowerCase() : normalized; +} + +export function normalizeEvent(raw: unknown): OpencodeEvent | null { + if (!raw || typeof raw !== "object") { + return null; + } + + const record = raw as Record; + + if (typeof record.type === "string") { + return { + type: record.type, + properties: record.properties, + }; + } + + if (record.payload && typeof record.payload === "object") { + const payload = record.payload as Record; + if (typeof payload.type === "string") { + return { + type: payload.type, + properties: payload.properties, + }; + } + } + + return null; +} + +export function formatRelativeTime(timestampMs: number) { + const delta = Date.now() - timestampMs; + + if (delta < 0) { + return t("time.just_now"); + } + + if (delta < 60_000) { + return t("time.seconds_ago", { count: Math.max(1, Math.round(delta / 1000)) }); + } + + if (delta < 60 * 60_000) { + return t("time.minutes_ago", { count: Math.max(1, Math.round(delta / 60_000)) }); + } + + if (delta < 24 * 60 * 60_000) { + return t("time.hours_ago", { count: Math.max(1, Math.round(delta / (60 * 60_000))) }); + } + + return new Date(timestampMs).toLocaleDateString(); +} + +export function addOpencodeCacheHint(message: string) { + const lower = message.toLowerCase(); + const cacheSignals = [ + ".cache/opencode", + "library/caches/opencode", + "appdata/local/opencode", + "fetch_jwks.js", + "opencode cache", + ]; + + if (cacheSignals.some((signal) => lower.includes(signal)) && lower.includes("enoent")) { + return `${message}\n\nOpenCode cache looks corrupted. 
Use Repair cache in Settings to rebuild it.`; + } + + return message; +} + +const SANDBOX_DOCKER_OFFLINE_HINTS = [ + "cannot connect to the docker daemon", + "is the docker daemon running", + "docker daemon", + "docker desktop", + "docker engine", + "error during connect", + "docker.sock", + "docker_socket", + "open //./pipe/docker_engine", +]; + +const SANDBOX_NETWORK_HINTS = [ + "failed to fetch", + "fetch failed", + "networkerror", + "request timed out", + "timeout", + "connection refused", + "econnrefused", + "connection reset", + "socket hang up", + "enotfound", + "getaddrinfo", + "could not connect", +]; + +export function isSandboxWorkspace(workspace: WorkspaceInfo) { + return ( + workspace.workspaceType === "remote" && + (workspace.sandboxBackend === "docker" || + workspace.sandboxBackend === "microsandbox" || + Boolean(workspace.sandboxRunId?.trim()) || + Boolean(workspace.sandboxContainerName?.trim())) + ); +} + +export function redactTokenLikeText(value: string): string { + return value + .replace(/([?&](?:access_token|api_key|key|password|token)=)[^&\s]+/gi, "$1[redacted]") + .replace(/\b(authorization:\s*bearer\s+)[^\s,]+/gi, "$1[redacted]") + .replace(/\b(bearer\s+)[a-z0-9._~+/=-]+/gi, "$1[redacted]") + .replace(/\bowt_[a-z0-9_-]+\b/gi, "owt_[redacted]"); +} + +export function getWorkspaceTaskLoadErrorDisplay(workspace: WorkspaceInfo, error?: string | null) { + const raw = redactTokenLikeText(error?.trim() ?? ""); + const fallbackTitle = raw || "Failed to load tasks"; + if (!raw || !isSandboxWorkspace(workspace)) { + return { + tone: "error" as const, + label: "Error", + message: raw && workspace.workspaceType === "remote" ? raw : "Failed to load tasks", + title: fallbackTitle, + }; + } + + const normalized = raw.toLowerCase(); + const hasDockerHint = SANDBOX_DOCKER_OFFLINE_HINTS.some((hint) => normalized.includes(hint)); + const hasNetworkHint = SANDBOX_NETWORK_HINTS.some((hint) => normalized.includes(hint)); + const host = `${workspace.baseUrl ?? 
""} ${workspace.openworkHostUrl ?? ""}`.toLowerCase(); + const localHost = host.includes("localhost") || host.includes("127.0.0.1"); + + if (!hasDockerHint && !(localHost && hasNetworkHint)) { + return { + tone: "error" as const, + label: "Error", + message: "Failed to load tasks", + title: fallbackTitle, + }; + } + + const message = "Sandbox is offline. Start Docker Desktop, then test connection."; + return { + tone: "offline" as const, + label: "Offline", + message, + title: `${message}\n\n${raw}`, + }; +} + +export function parseTemplateFrontmatter(raw: string) { + const trimmed = raw.trimStart(); + if (!trimmed.startsWith("---")) return null; + const endIndex = trimmed.indexOf("\n---", 3); + if (endIndex === -1) return null; + const header = trimmed.slice(3, endIndex).trim(); + const body = trimmed.slice(endIndex + 4).replace(/^\r?\n/, ""); + const data: Record = {}; + + const unescapeValue = (value: string) => { + if (value.startsWith("\"") && value.endsWith("\"")) { + const inner = value.slice(1, -1); + return inner.replace(/\\(\\|\"|n|r|t)/g, (_match, code) => { + switch (code) { + case "n": + return "\n"; + case "r": + return "\r"; + case "t": + return "\t"; + case "\\": + return "\\"; + case "\"": + return "\""; + default: + return code; + } + }); + } + + if (value.startsWith("'") && value.endsWith("'")) { + return value.slice(1, -1).replace(/''/g, "'"); + } + + return value; + }; + + for (const line of header.split(/\r?\n/)) { + const entry = line.trim(); + if (!entry) continue; + const colonIndex = entry.indexOf(":"); + if (colonIndex === -1) continue; + const key = entry.slice(0, colonIndex).trim(); + let value = entry.slice(colonIndex + 1).trim(); + if (!key) continue; + value = unescapeValue(value); + data[key] = value; + } + + return { data, body }; +} + +export function upsertSession(list: Session[], next: Session) { + const idx = list.findIndex((s) => s.id === next.id); + if (idx === -1) return [...list, next]; + + const copy = list.slice(); + 
copy[idx] = next; + return copy; +} + +export function normalizeSessionStatus(status: unknown) { + if (!status || typeof status !== "object") return "idle"; + const record = status as Record; + if (record.type === "busy") return "running"; + if (record.type === "retry") return "retry"; + if (record.type === "idle") return "idle"; + return "idle"; +} + +export function modelFromUserMessage(info: MessageInfo): ModelRef | null { + if (!info || typeof info !== "object") return null; + if ((info as any).role !== "user") return null; + + const model = (info as any).model as unknown; + if (!model || typeof model !== "object") return null; + + const providerID = (model as any).providerID; + const modelID = (model as any).modelID; + + if (typeof providerID !== "string" || typeof modelID !== "string") return null; + return { providerID, modelID }; +} + +export function lastUserModelFromMessages(list: MessageWithParts[]): ModelRef | null { + for (let i = list.length - 1; i >= 0; i -= 1) { + const model = modelFromUserMessage(list[i]?.info); + if (model) return model; + } + + return null; +} + +export function isStepPart(part: Part) { + return part.type === "reasoning" || part.type === "tool"; +} + +export function isUserVisiblePart(part: Part) { + const flags = part as { synthetic?: boolean; ignored?: boolean }; + return !flags.synthetic && !flags.ignored; +} + +export function isVisibleTextPart(part: Part) { + return part.type === "text" && isUserVisiblePart(part); +} + +const EXPLORATION_TOOL_NAMES = new Set(["read", "glob", "grep", "search", "list", "list_files"]); + +function isExplorationToolPart(part: Part) { + if (part.type !== "tool") return false; + const tool = typeof (part as any).tool === "string" ? 
String((part as any).tool).toLowerCase() : ""; + return EXPLORATION_TOOL_NAMES.has(tool); +} + +export function groupMessageParts(parts: Part[], messageId: string): MessageGroup[] { + const groups: MessageGroup[] = []; + const explorationSteps: Part[] = []; + let textBuffer = ""; + let stepGroupIndex = 0; + let sawExecution = false; + + const flushText = () => { + if (!textBuffer) return; + groups.push({ + kind: "text", + part: { type: "text", text: textBuffer } as Part, + segment: sawExecution ? "result" : "intent", + }); + textBuffer = ""; + }; + + const pushSteps = (stepParts: Part[], mode: "exploration" | "standalone") => { + if (!stepParts.length) return; + groups.push({ + kind: "steps", + id: `steps-${messageId}-${stepGroupIndex}`, + parts: stepParts, + segment: "execution", + mode, + }); + stepGroupIndex += 1; + sawExecution = true; + }; + + const flushExplorationSteps = () => { + if (!explorationSteps.length) return; + pushSteps(explorationSteps.splice(0, explorationSteps.length), "exploration"); + }; + + parts.forEach((part) => { + if (part.type === "text") { + if (!isVisibleTextPart(part)) { + return; + } + flushExplorationSteps(); + textBuffer += (part as { text?: string }).text ?? ""; + return; + } + + if (part.type === "agent") { + flushExplorationSteps(); + const name = (part as { name?: string }).name ?? ""; + textBuffer += name ? `@${name}` : "@agent"; + return; + } + + if (part.type === "file") { + flushExplorationSteps(); + flushText(); + groups.push({ kind: "text", part, segment: sawExecution ? 
"result" : "intent" }); + return; + } + + if (part.type === "step-start" || part.type === "step-finish") { + return; + } + + flushText(); + + if (isExplorationToolPart(part)) { + explorationSteps.push(part); + return; + } + + if (part.type === "reasoning" && explorationSteps.length > 0) { + explorationSteps.push(part); + return; + } + + flushExplorationSteps(); + pushSteps([part], "standalone"); + }); + + flushText(); + + flushExplorationSteps(); + + return groups; +} + +/** Classify a tool name into a semantic category for icon selection */ +export function classifyTool(toolName: string): "read" | "edit" | "write" | "search" | "terminal" | "glob" | "task" | "skill" | "tool" { + const lower = toolName.toLowerCase(); + if (lower === "skill") return "skill"; + if (lower.includes("read") || lower.includes("cat") || lower.includes("fetch")) return "read"; + if (lower === "apply_patch") return "write"; + if (lower.includes("edit") || lower.includes("replace") || lower.includes("update")) return "edit"; + if (lower.includes("write") || lower.includes("create") || lower.includes("patch")) return "write"; + if (lower.includes("grep") || lower.includes("search") || lower.includes("find")) return "search"; + if (lower.includes("bash") || lower.includes("shell") || lower.includes("exec") || lower.includes("command") || lower.includes("run")) return "terminal"; + if (lower.includes("glob") || lower.includes("list") || lower.includes("ls")) return "glob"; + if (lower.includes("task") || lower.includes("agent") || lower.includes("todo")) return "task"; + return "tool"; +} + +/** Extract a clean filename from a file path */ +function extractFilename(filePath: string): string { + const parts = filePath.replace(/\\/g, "/").split("/"); + return parts[parts.length - 1] || filePath; +} + +function normalizeStepText(value: unknown): string { + if (typeof value !== "string") return ""; + return value.replace(/\s+/g, " ").trim(); +} + +function cleanReasoningText(value: string): string { 
+ return value + .replace(/\[REDACTED\]/g, "") + .replace(/\*\*([^*]+)\*\*/g, "$1") + .replace(/__([^_]+)__/g, "$1") + .replace(/`([^`]+)`/g, "$1") + .trim(); +} + +function truncateStepText(value: string, max = 80): string { + return value.length > max ? `${value.slice(0, Math.max(0, max - 3))}...` : value; +} + +function isPathLike(value: string): boolean { + return /^(?:[A-Za-z]:[\\/]|~[\\/]|\/|\.\.?[\\/])/.test(value) || /[\\/]/.test(value); +} + +function normalizePathToken(value: string): string { + const clean = value.trim().replace(/^[`'"([{]+|[`'"\])},.;:]+$/g, ""); + if (!isPathLike(clean)) return clean; + return extractFilename(clean); +} + +function formatAgentLabel(value: string): string { + const clean = value.trim().replace(/[_-]+/g, " "); + if (!clean) return ""; + return clean + .split(/\s+/) + .filter(Boolean) + .map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1)) + .join(" "); +} + +function getToolInput(state: any): Record { + const input = state?.input; + if (input && typeof input === "object") return input as Record; + return {}; +} + +function pickInputText(input: Record, keys: string[]): string { + for (const key of keys) { + const value = input[key]; + const text = normalizeStepText(value); + if (text) return text; + } + return ""; +} + +function buildToolTitle(state: any, toolName: string): string { + const lower = toolName.toLowerCase(); + const input = getToolInput(state); + const pick = (...keys: string[]) => pickInputText(input, keys); + const file = (...keys: string[]) => { + const value = pick(...keys); + if (!value) return ""; + return normalizePathToken(value); + }; + + if (lower === "read") { + const target = file("filePath", "path", "file"); + return target ? `Reviewed ${target}` : "Reviewed file"; + } + + if (lower === "edit") { + const target = file("filePath", "path", "file"); + return target ? 
`Updated ${target}` : "Updated file"; + } + + if (lower === "write") { + const target = file("filePath", "path", "file"); + return target ? `Write ${target}` : "Write file"; + } + + if (lower === "apply_patch") { + return "Apply patch"; + } + + if (lower === "list" || lower === "list_files") { + const target = file("path"); + return target ? `Reviewed ${target}` : "Reviewed files"; + } + + if (lower === "grep" || lower === "glob" || lower === "search") { + const pattern = pick("pattern", "query"); + return pattern ? `Searched ${truncateStepText(pattern, 44)}` : "Searched code"; + } + + if (lower === "bash") { + const description = pick("description"); + if (description) return truncateStepText(description, 56); + const command = pick("command", "cmd"); + if (command) return truncateStepText(`Run ${command}`, 56); + return "Run command"; + } + + if (lower === "task") { + const agent = formatAgentLabel(pick("subagent_type")); + if (agent) return `${agent} task`; + return "Task"; + } + + if (lower === "todowrite") { + return "Update todo list"; + } + + if (lower === "todoread") { + return "Read todo list"; + } + + if (lower === "webfetch") { + const url = pick("url"); + return url ? `Checked ${truncateStepText(url, 44)}` : "Checked web page"; + } + + if (lower === "skill") { + const name = pick("name"); + return name ? `Load skill ${name}` : "Load skill"; + } + + const stateTitle = normalizeStepText(state?.title); + if (stateTitle) { + return truncateStepText(isPathLike(stateTitle) ? 
normalizePathToken(stateTitle) : stateTitle, 56); + } + + const fallback = normalizeStepText(toolName) + .replace(/[_-]+/g, " ") + .replace(/\s+/g, " "); + return fallback || "Tool"; +} + +/** Build a concise detail line for a tool call — avoids dumping raw output */ +function buildToolDetail(state: any, toolName: string): string | undefined { + const lower = toolName.toLowerCase(); + const input = getToolInput(state); + const pick = (...keys: string[]) => pickInputText(input, keys); + + if (lower === "read") { + const chunks: string[] = []; + const offset = input.offset; + const limit = input.limit; + if (typeof offset === "number") chunks.push(`offset ${offset}`); + if (typeof limit === "number") chunks.push(`limit ${limit}`); + if (chunks.length > 0) return chunks.join(" - "); + return undefined; + } + + if (lower === "bash") { + const command = pick("command", "cmd"); + if (command) return truncateStepText(command, 80); + } + + if (lower === "grep" || lower === "glob" || lower === "search") { + const root = pick("path"); + if (root) return `in ${normalizePathToken(root)}`; + } + + if (lower === "task") { + const description = pick("description"); + if (description) return truncateStepText(description, 80); + const agent = formatAgentLabel(pick("subagent_type")); + if (agent) return `${agent} agent`; + } + + if (lower === "todowrite" || lower === "todoread") { + return undefined; + } + + if (lower === "webfetch") { + const url = pick("url"); + if (url) return truncateStepText(url, 80); + } + + // For file operations, show the filename + const filePath = state?.path ?? 
state?.file; + if (typeof filePath === "string" && filePath.trim()) { + const name = extractFilename(filePath.trim()); + const status = state?.status; + if (status === "completed" || status === "done") { + return name; + } + return name; + } + + // For edits that report updated files, show filename(s) + const files = state?.files; + if (Array.isArray(files) && files.length > 0) { + const names = files.filter((f: any) => typeof f === "string").map(extractFilename); + if (names.length === 1) return names[0]; + if (names.length > 1) return `${names[0]} +${names.length - 1} more`; + } + + // For bash/terminal commands, show the command + const command = state?.command ?? state?.cmd; + if (typeof command === "string" && command.trim()) { + const clean = command.trim(); + return clean.length > 80 ? `${clean.slice(0, 77)}...` : clean; + } + + // For search/grep, show the pattern + const pattern = state?.pattern ?? state?.query; + if (typeof pattern === "string" && pattern.trim()) { + return `"${pattern.trim().length > 60 ? pattern.trim().slice(0, 57) + "..." : pattern.trim()}"`; + } + + // Subtitle/detail from state as fallback + const subtitle = state?.subtitle ?? state?.detail ?? state?.summary; + if (typeof subtitle === "string" && subtitle.trim()) { + const clean = subtitle.trim(); + return clean.length > 80 ? `${clean.slice(0, 77)}...` : clean; + } + + // For completed tools with output, show a very short summary + const outputRaw = typeof state?.output === "string" ? state.output.trim() : ""; + if (outputRaw) { + if (lower === "read") return undefined; + + const output = outputRaw.length > 3000 ? 
outputRaw.slice(0, 3000) : outputRaw; + + // Extract just the first meaningful line (skip line numbers and raw file markers) + const lines = output.split("\n").filter((l: string) => { + const trimmed = l.trim(); + return ( + trimmed && + !trimmed.startsWith("") && + !trimmed.startsWith("") && + !trimmed.startsWith("") && + !trimmed.startsWith("") && + !trimmed.startsWith("") && + !/^\d{5}\|/.test(trimmed) && + !/^\d+:\s/.test(trimmed) + ); + }); + if (lines.length > 0) { + const first = lines[0].trim(); + if (first.startsWith("Success")) { + // "Success. Updated the following files: M foo.ts" -> "foo.ts" + const match = first.match(/:\s*[MADR]\s+(.+)/); + if (match) return extractFilename(match[1].trim()); + return "Done"; + } + return first.length > 80 ? `${first.slice(0, 77)}...` : first; + } + } + + return undefined; +} + +const ARTIFACT_PATH_PATTERN = + /(?:^|[\s"'`([{])((?:[a-zA-Z]:[/\\]|\.{1,2}[/\\]|~[/\\]|[/\\])[\w./\\\-]*\.[a-z][a-z0-9]{0,9}|[\w.\-]+[/\\][\w./\\\-]*\.[a-z][a-z0-9]{0,9})/gi; +const ARTIFACT_OUTPUT_SCAN_LIMIT = 4000; +const ARTIFACT_OUTPUT_SKIP_TOOLS = new Set(["webfetch"]); + +// Patterns that indicate a path is a truncated system/absolute path rather than a workspace-relative path +const TRUNCATED_SYSTEM_PATH_PATTERNS = [ + /com\.[^/]+\.(openwork|opencode)/i, // macOS app bundle identifiers + /\.openwork\.dev\//i, // OpenWork dev paths + /Application Support\//i, // macOS Application Support + /AppData[/\\]/i, // Windows AppData + /\.local\/share\//i, // Linux XDG data + /workspaces\/[^/]+\/workspaces\//i, // Nested workspaces paths (clearly malformed) +]; + +/** + * Clean up an artifact path to extract the workspace-relative portion. + * Returns null if the path should be rejected entirely. 
+ */ +function cleanArtifactPath(rawPath: string): string | null { + const normalized = rawPath.trim().replace(/[\\/]+/g, "/"); + if (!normalized) return null; + + // Check if this looks like a truncated system path + for (const pattern of TRUNCATED_SYSTEM_PATH_PATTERNS) { + if (pattern.test(normalized)) { + // Try to extract just the relative part after "workspaces//" + const workspacesMatch = normalized.match(/workspaces\/[^/]+\/(.+)$/i); + if (workspacesMatch && workspacesMatch[1]) { + const relative = workspacesMatch[1]; + // Validate the extracted path doesn't still contain system patterns + if (!TRUNCATED_SYSTEM_PATH_PATTERNS.some((p) => p.test(relative))) { + return relative; + } + } + // Reject the path entirely if we can't extract a clean relative path + return null; + } + } + + return normalized; +} + +type DeriveArtifactsOptions = { + maxMessages?: number; +}; + +export function summarizeStep(part: Part): { title: string; detail?: string; isSkill?: boolean; skillName?: string; toolCategory?: string; status?: string } { + if (part.type === "tool") { + const record = part as any; + const toolName = record.tool ? String(record.tool) : "Tool"; + const state = record.state ?? {}; + const title = buildToolTitle(state, toolName); + const category = classifyTool(toolName); + const status = state.status ? String(state.status) : undefined; + const detail = buildToolDetail(state, toolName); + const normalizedTitle = normalizeStepText(title).toLowerCase(); + const finalDetail = detail && normalizeStepText(detail).toLowerCase() !== normalizedTitle ? 
detail : undefined; + + // Detect skill trigger + if (category === "skill") { + const skillName = state.metadata?.name || title.replace(/^(Loaded skill:\s*|Load skill\s+)/i, ""); + return { title, isSkill: true, skillName, detail: finalDetail, toolCategory: category, status }; + } + + return { title, detail: finalDetail, toolCategory: category, status }; + } + + if (part.type === "reasoning") { + const record = part as any; + const text = typeof record.text === "string" ? cleanReasoningText(record.text) : ""; + if (!text) return { title: "Thinking", toolCategory: "tool" }; + + const lines = text + .split(/\r?\n/) + .map((line: string) => line.trim()) + .filter(Boolean); + const compact = lines.join(" "); + + let headline = ""; + let detail = ""; + if (lines.length > 1) { + headline = lines[0]; + detail = lines.slice(1).join("\n"); + } else { + const sentenceBreak = compact.indexOf(". "); + if (sentenceBreak > 18 && sentenceBreak < 120) { + headline = compact.slice(0, sentenceBreak + 1).trim(); + detail = compact.slice(sentenceBreak + 2).trim(); + } else { + headline = compact; + detail = compact; + } + } + + headline = headline.replace(/^thinking[:\s-]*/i, "").trim(); + const title = truncateStepText(headline || "Thinking", 96); + return { title, detail: detail || undefined, toolCategory: "tool" }; + } + + if (part.type === "step-start" || part.type === "step-finish") { + const reason = (part as any).reason; + return { + title: part.type === "step-start" ? "Step started" : "Step finished", + detail: reason ? String(reason) : undefined, + toolCategory: "tool", + }; + } + + return { title: "Step", toolCategory: "tool" }; +} + +export function deriveArtifacts(list: MessageWithParts[], options: DeriveArtifactsOptions = {}): ArtifactItem[] { + const results = new Map(); + const maxMessages = + typeof options.maxMessages === "number" && Number.isFinite(options.maxMessages) && options.maxMessages > 0 + ? 
Math.floor(options.maxMessages) + : null; + const source = maxMessages && list.length > maxMessages ? list.slice(list.length - maxMessages) : list; + + source.forEach((message) => { + const messageId = String((message.info as any)?.id ?? ""); + + message.parts.forEach((part) => { + if (part.type !== "tool") return; + const record = part as any; + const state = record.state ?? {}; + const matches = new Set(); + + const explicit = [ + state.path, + state.file, + ...(Array.isArray(state.files) ? state.files : []), + ]; + + explicit.forEach((f) => { + if (typeof f === "string") { + const trimmed = f.trim(); + if ( + trimmed.length > 0 && + trimmed.length <= 500 && + trimmed.includes(".") && + !/^\.{2,}$/.test(trimmed) + ) { + matches.add(trimmed); + } + } + }); + + const toolName = + typeof record.tool === "string" && record.tool.trim() + ? record.tool.trim().toLowerCase() + : ""; + const titleText = typeof state.title === "string" ? state.title : ""; + const outputText = + typeof state.output === "string" && !ARTIFACT_OUTPUT_SKIP_TOOLS.has(toolName) + ? state.output.slice(0, ARTIFACT_OUTPUT_SCAN_LIMIT) + : ""; + + const text = [titleText, outputText] + .filter((v): v is string => Boolean(v)) + .join(" "); + + if (text) { + ARTIFACT_PATH_PATTERN.lastIndex = 0; + Array.from(text.matchAll(ARTIFACT_PATH_PATTERN)) + .map((m) => m[1]) + .filter((f) => f && f.length <= 500) + .forEach((f) => matches.add(f)); + } + + if (matches.size === 0) return; + + matches.forEach((match) => { + const cleanedPath = cleanArtifactPath(match); + if (!cleanedPath) return; + + const key = cleanedPath.toLowerCase(); + const name = cleanedPath.split("/").pop() ?? cleanedPath; + const id = `artifact-${encodeURIComponent(cleanedPath)}`; + + // Delete and re-add to move to end (most recent) + if (results.has(key)) results.delete(key); + results.set(key, { + id, + name, + path: cleanedPath, + kind: "file" as const, + size: state.size ? 
String(state.size) : undefined, + messageId: messageId || undefined, + }); + }); + }); + }); + + return Array.from(results.values()); +} + +export function deriveWorkingFiles(items: ArtifactItem[]): string[] { + const results: string[] = []; + const seen = new Set(); + + for (const item of items) { + const rawKey = item.path ?? item.name; + const normalizedPath = rawKey.trim().replace(/[\\/]+/g, "/"); + const normalizedKey = normalizedPath.toLowerCase(); + if (!normalizedPath || seen.has(normalizedKey)) continue; + seen.add(normalizedKey); + results.push(normalizedPath); + if (results.length >= 5) break; + } + + return results; +} diff --git a/apps/app/src/app/utils/plugins.ts b/apps/app/src/app/utils/plugins.ts new file mode 100644 index 0000000000..8026a6477d --- /dev/null +++ b/apps/app/src/app/utils/plugins.ts @@ -0,0 +1,88 @@ +import { parse } from "jsonc-parser"; + +import type { OpencodeConfigFile } from "../lib/desktop"; + +type PluginListValue = string | string[] | null | undefined; + +type PluginConfig = { + content: string | null; +} | null; + +export function normalizePluginList(value: PluginListValue) { + if (!value) return [] as string[]; + if (Array.isArray(value)) { + return value + .map((entry) => (typeof entry === "string" ? entry.trim() : "")) + .filter((entry) => entry.length > 0); + } + if (typeof value === "string") { + const trimmed = value.trim(); + return trimmed ? [trimmed] : []; + } + return [] as string[]; +} + +export function stripPluginVersion(spec: string) { + const trimmed = spec.trim(); + if (!trimmed) return ""; + + const looksLikeVersion = (suffix: string) => + /^(latest|next|beta|alpha|canary|rc|stable|\d)/i.test(suffix); + + if (trimmed.startsWith("@")) { + const slashIndex = trimmed.indexOf("/"); + if (slashIndex === -1) return trimmed; + + const atIndex = trimmed.indexOf("@", slashIndex + 1); + if (atIndex === -1) return trimmed; + + const suffix = trimmed.slice(atIndex + 1); + return looksLikeVersion(suffix) ? 
trimmed.slice(0, atIndex) : trimmed; + } + + const atIndex = trimmed.indexOf("@"); + if (atIndex === -1) return trimmed; + + const suffix = trimmed.slice(atIndex + 1); + return looksLikeVersion(suffix) ? trimmed.slice(0, atIndex) : trimmed; +} + +export function isPluginInstalled(pluginList: string[], pluginName: string, aliases: string[] = []) { + const normalized = pluginList.flatMap((entry) => { + const raw = entry.toLowerCase(); + const stripped = stripPluginVersion(entry).toLowerCase(); + return stripped && stripped !== raw ? [raw, stripped] : [raw]; + }); + + const list = new Set(normalized); + return [pluginName, ...aliases].some((entry) => list.has(entry.toLowerCase())); +} + +export function loadPluginsFromConfig( + config: PluginConfig, + onList: (next: string[]) => void, + onError: (message: string) => void, +) { + if (!config?.content) { + onList([]); + return; + } + + try { + const parsed = parse(config.content) as Record | undefined; + const next = normalizePluginList(parsed?.plugin as PluginListValue); + onList(next); + } catch (e) { + onList([]); + onError(e instanceof Error ? 
e.message : "Failed to parse opencode.json"); + } +} + +export function parsePluginListFromContent(content: string) { + try { + const parsed = parse(content) as Record | undefined; + return normalizePluginList(parsed?.plugin as PluginListValue); + } catch { + return [] as string[]; + } +} diff --git a/apps/app/src/app/utils/providers.ts b/apps/app/src/app/utils/providers.ts new file mode 100644 index 0000000000..966c0822b6 --- /dev/null +++ b/apps/app/src/app/utils/providers.ts @@ -0,0 +1,51 @@ +import type { Provider as ConfigProvider, ProviderListResponse } from "@opencode-ai/sdk/v2/client"; + +const PINNED_PROVIDER_ORDER = ["opencode", "openai", "anthropic"] as const; + +export const providerPriorityRank = (id: string) => { + const normalized = id.trim().toLowerCase(); + const index = PINNED_PROVIDER_ORDER.indexOf( + normalized as (typeof PINNED_PROVIDER_ORDER)[number], + ); + return index === -1 ? PINNED_PROVIDER_ORDER.length : index; +}; + +export const compareProviders = ( + a: { id: string; name?: string }, + b: { id: string; name?: string }, +) => { + const rankDiff = providerPriorityRank(a.id) - providerPriorityRank(b.id); + if (rankDiff !== 0) return rankDiff; + + const aName = (a.name ?? a.id).trim(); + const bName = (b.name ?? b.id).trim(); + return aName.localeCompare(bName); +}; + +// Starting with @opencode-ai/sdk@1.4.x, `ConfigProvider` (from `config.providers()`) +// and the provider items in `ProviderListResponse.all` share the same shape, so +// this mapper is effectively an identity function. It is kept for call-site +// stability and to normalize optional fields (`name`, `env`). +export const mapConfigProvidersToList = ( + providers: ConfigProvider[], +): ProviderListResponse["all"] => + providers.map((provider) => ({ + ...provider, + name: provider.name ?? provider.id, + env: provider.env ?? 
[], + })); + +export const filterProviderList = ( + value: ProviderListResponse, + disabledProviders: string[], +): ProviderListResponse => { + const disabled = new Set(disabledProviders.map((id) => id.trim()).filter(Boolean)); + if (!disabled.size) return value; + return { + all: value.all.filter((provider) => !disabled.has(provider.id)), + connected: value.connected.filter((id) => !disabled.has(id)), + default: Object.fromEntries( + Object.entries(value.default).filter(([id]) => !disabled.has(id)), + ), + }; +}; diff --git a/apps/app/src/components/ui/accordion.tsx b/apps/app/src/components/ui/accordion.tsx new file mode 100644 index 0000000000..2faf916dc7 --- /dev/null +++ b/apps/app/src/components/ui/accordion.tsx @@ -0,0 +1,75 @@ +import { Accordion as AccordionPrimitive } from "@base-ui/react/accordion" + +import { cn } from "@/lib/utils" +import { ChevronDownIcon, ChevronUpIcon } from "lucide-react" + +function Accordion({ className, ...props }: AccordionPrimitive.Root.Props) { + return ( + + ) +} + +function AccordionItem({ className, ...props }: AccordionPrimitive.Item.Props) { + return ( + + ) +} + +function AccordionTrigger({ + className, + children, + ...props +}: AccordionPrimitive.Trigger.Props) { + return ( + + + {children} + + + + + ) +} + +function AccordionContent({ + className, + children, + ...props +}: AccordionPrimitive.Panel.Props) { + return ( + +
+ {children} +
+
+ ) +} + +export { Accordion, AccordionItem, AccordionTrigger, AccordionContent } diff --git a/apps/app/src/components/ui/button.tsx b/apps/app/src/components/ui/button.tsx new file mode 100644 index 0000000000..a14483c129 --- /dev/null +++ b/apps/app/src/components/ui/button.tsx @@ -0,0 +1,56 @@ +import { Button as ButtonPrimitive } from "@base-ui/react/button" +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const buttonVariants = cva( + "group/button inline-flex shrink-0 items-center justify-center rounded-4xl border border-transparent bg-clip-padding text-sm font-medium whitespace-nowrap transition-all outline-none select-none focus-visible:border-ring focus-visible:ring-3 focus-visible:ring-ring/30 active:not-aria-[haspopup]:translate-y-px disabled:pointer-events-none disabled:opacity-50 aria-invalid:border-destructive aria-invalid:ring-3 aria-invalid:ring-destructive/20 dark:aria-invalid:border-destructive/50 dark:aria-invalid:ring-destructive/40 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4 relative", + { + variants: { + variant: { + default: "bg-primary text-primary-foreground hover:bg-primary/80 bg-clip-padding shadow-xs/5 before:pointer-events-none before:absolute before:inset-0 before:rounded-[calc(var(--radius-xl)-1px)] before:shadow-[0_1px_--theme(--color-black/4%)] dark:before:shadow-[0_-1px_--theme(--color-white/6%)]", + outline: + "border-border bg-muted/20 hover:bg-muted hover:border-foreground/20 hover:text-foreground aria-expanded:bg-muted aria-expanded:text-foreground dark:bg-muted/20 dark:hover:bg-input/30 dark:hover:border-input/80 bg-clip-padding shadow-xs/5 before:pointer-events-none before:absolute before:inset-0 before:rounded-[calc(var(--radius-xl)-1px)] before:shadow-[0_1px_--theme(--color-black/4%)] dark:before:shadow-[0_-1px_--theme(--color-white/6%)]", + secondary: + "bg-secondary text-secondary-foreground hover:bg-secondary/80 
aria-expanded:bg-secondary aria-expanded:text-secondary-foreground", + ghost: + "hover:bg-muted hover:text-foreground aria-expanded:bg-muted aria-expanded:text-foreground dark:hover:bg-muted/50", + destructive: + "border-border text-destructive hover:bg-destructive/10 hover:border-destructive/40 focus-visible:border-destructive/40 focus-visible:ring-destructive/20 dark:border-border dark:hover:bg-destructive/10 dark:border-destructive/40 dark:focus-visible:ring-destructive/40 bg-clip-padding shadow-xs/5 before:pointer-events-none before:absolute before:inset-0 before:rounded-[calc(var(--radius-xl)-1px)] before:shadow-[0_1px_--theme(--color-black/4%)] dark:before:shadow-[0_-1px_--theme(--color-white/6%)]", + link: "text-primary underline-offset-4 hover:underline", + }, + size: { + default: + "h-9 gap-1.5 px-3 has-data-[icon=inline-end]:pe-2.5 has-data-[icon=inline-start]:ps-2.5 rounded-xl", + xs: "h-6 gap-1 px-2.5 text-xs has-data-[icon=inline-end]:pe-2 has-data-[icon=inline-start]:ps-2 [&_svg:not([class*='size-'])]:size-3", + sm: "h-8 gap-1 px-3 has-data-[icon=inline-end]:pe-2 has-data-[icon=inline-start]:ps-2 rounded-xl", + lg: "h-10 gap-1.5 px-4 has-data-[icon=inline-end]:pe-3 has-data-[icon=inline-start]:ps-3 rounded-xl", + icon: "size-9", + "icon-xs": "size-6 [&_svg:not([class*='size-'])]:size-3", + "icon-sm": "size-8", + "icon-lg": "size-10", + }, + }, + defaultVariants: { + variant: "default", + size: "default", + }, + } +) + +function Button({ + className, + variant = "default", + size = "default", + ...props +}: ButtonPrimitive.Props & VariantProps) { + return ( + + ) +} + +export { Button, buttonVariants } diff --git a/apps/app/src/components/ui/collapsible.tsx b/apps/app/src/components/ui/collapsible.tsx new file mode 100644 index 0000000000..4b242f71d4 --- /dev/null +++ b/apps/app/src/components/ui/collapsible.tsx @@ -0,0 +1,19 @@ +import { Collapsible as CollapsiblePrimitive } from "@base-ui/react/collapsible" + +function Collapsible({ ...props }: 
CollapsiblePrimitive.Root.Props) { + return +} + +function CollapsibleTrigger({ ...props }: CollapsiblePrimitive.Trigger.Props) { + return ( + + ) +} + +function CollapsibleContent({ ...props }: CollapsiblePrimitive.Panel.Props) { + return ( + + ) +} + +export { Collapsible, CollapsibleTrigger, CollapsibleContent } diff --git a/apps/app/src/components/ui/dialog.tsx b/apps/app/src/components/ui/dialog.tsx new file mode 100644 index 0000000000..c470012e61 --- /dev/null +++ b/apps/app/src/components/ui/dialog.tsx @@ -0,0 +1,158 @@ +import * as React from "react"; +import { Dialog as DialogPrimitive } from "@base-ui/react/dialog" + +import { cn } from "@/lib/utils" +import { Button } from "@/components/ui/button" +import { XIcon } from "lucide-react" + +function Dialog({ ...props }: DialogPrimitive.Root.Props) { + return +} + +function DialogTrigger({ ...props }: DialogPrimitive.Trigger.Props) { + return +} + +function DialogPortal({ ...props }: DialogPrimitive.Portal.Props) { + return +} + +function DialogClose({ ...props }: DialogPrimitive.Close.Props) { + return +} + +function DialogOverlay({ + className, + ...props +}: DialogPrimitive.Backdrop.Props) { + return ( + + ) +} + +function DialogContent({ + className, + children, + showCloseButton = true, + ...props +}: DialogPrimitive.Popup.Props & { + showCloseButton?: boolean +}) { + return ( + + + + {children} + {showCloseButton && ( + + } + > + + Close + + )} + + + ) +} + +function DialogHeader({ className, ...props }: React.ComponentProps<"div">) { + return ( +
+ ) +} + +function DialogFooter({ + className, + showCloseButton = false, + children, + ...props +}: React.ComponentProps<"div"> & { + showCloseButton?: boolean +}) { + return ( +
+ {children} + {showCloseButton && ( + }> + Close + + )} +
+ ) +} + +function DialogTitle({ className, ...props }: DialogPrimitive.Title.Props) { + return ( + + ) +} + +function DialogDescription({ + className, + ...props +}: DialogPrimitive.Description.Props) { + return ( + + ) +} + +export { + Dialog, + DialogClose, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogOverlay, + DialogPortal, + DialogTitle, + DialogTrigger, +} diff --git a/apps/app/src/components/ui/field.tsx b/apps/app/src/components/ui/field.tsx new file mode 100644 index 0000000000..b2ff343451 --- /dev/null +++ b/apps/app/src/components/ui/field.tsx @@ -0,0 +1,236 @@ +import * as React from "react"; +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" +import { Label } from "@/components/ui/label" +import { Separator } from "@/components/ui/separator" + +function FieldSet({ className, ...props }: React.ComponentProps<"fieldset">) { + return ( +
[data-slot=checkbox-group]]:gap-3 has-[>[data-slot=radio-group]]:gap-3", + className + )} + {...props} + /> + ) +} + +function FieldLegend({ + className, + variant = "legend", + ...props +}: React.ComponentProps<"legend"> & { variant?: "legend" | "label" }) { + return ( + + ) +} + +function FieldGroup({ className, ...props }: React.ComponentProps<"div">) { + return ( +
+ ) +} + +const fieldVariants = cva( + "group/field flex w-full gap-3 data-[invalid=true]:text-destructive", + { + variants: { + orientation: { + vertical: "flex-col *:w-full [&>.sr-only]:w-auto", + horizontal: + "flex-row items-center has-[>[data-slot=field-content]]:items-start *:data-[slot=field-label]:flex-auto has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px", + responsive: + "flex-col *:w-full @md/field-group:flex-row @md/field-group:items-center @md/field-group:*:w-auto @md/field-group:has-[>[data-slot=field-content]]:items-start @md/field-group:*:data-[slot=field-label]:flex-auto [&>.sr-only]:w-auto @md/field-group:has-[>[data-slot=field-content]]:[&>[role=checkbox],[role=radio]]:mt-px", + }, + }, + defaultVariants: { + orientation: "vertical", + }, + } +) + +function Field({ + className, + orientation = "vertical", + ...props +}: React.ComponentProps<"div"> & VariantProps) { + return ( +
+ ) +} + +function FieldContent({ className, ...props }: React.ComponentProps<"div">) { + return ( +
+ ) +} + +function FieldLabel({ + className, + ...props +}: React.ComponentProps) { + return ( +
+
+ + + + +`; + +export const TOY_UI_FAVICON_SVG = ``; + +export const TOY_UI_JS = String.raw`const qs = (sel) => document.querySelector(sel); + +const pillConn = qs("#pill-conn"); +const pillScope = qs("#pill-scope"); +const chatlog = qs("#chatlog"); +const promptEl = qs("#prompt"); +const statusEl = qs("#status"); +const sessionIdEl = qs("#session-id"); +const workspaceIdEl = qs("#workspace-id"); +const serverVersionEl = qs("#server-version"); +const sandboxEl = qs("#sandbox"); +const fileInjectionEl = qs("#file-injection"); +const artifactsEl = qs("#artifacts"); +const approvalsEl = qs("#approvals"); +const connectEl = qs("#connect"); +const tokensEl = qs("#tokens"); +const exportEl = qs("#export"); +const importEl = qs("#import"); +const skillsEl = qs("#skills"); +const pluginsEl = qs("#plugins"); +const pluginSpecEl = qs("#plugin-spec"); +const mcpEl = qs("#mcp"); +const hostIdEl = qs("#host-id"); +const pillRun = qs("#pill-run"); +const timelineEl = qs("#timeline"); +const workspaceUrlEl = qs("#workspace-url"); +const shareScopeEl = qs("#share-scope"); +const shareLabelEl = qs("#share-label"); +const tabsEl = qs("#tabs"); + +const STORAGE_TOKEN = "openwork.toy.token"; +const STORAGE_SESSION_PREFIX = "openwork.toy.session."; + +function setPill(el, label, kind) { + el.textContent = label; + el.classList.remove("ok", "bad"); + if (kind) el.classList.add(kind); +} + +function setRun(label, kind) { + if (!pillRun) return; + setPill(pillRun, label, kind); +} + +function clearTimeline() { + if (!timelineEl) return; + timelineEl.innerHTML = ""; +} + +function summarizeEvent(payload) { + if (!payload || typeof payload !== "object") return ""; + const keys = ["name", "tool", "action", "summary", "status", "message"]; + for (const key of keys) { + const value = payload[key]; + if (typeof value === "string" && value.trim()) return value.trim(); + } + return ""; +} + +function addCheckpoint(label, detail) { + if (!timelineEl) return; + + const row = 
document.createElement("div");
  row.className = "item";

  const top = document.createElement("div");
  top.className = "row";

  const left = document.createElement("div");
  const name = document.createElement("div");
  name.className = "mono";
  name.textContent = label;

  // Timestamp the checkpoint with a locale-formatted time string.
  const meta = document.createElement("div");
  meta.className = "small";
  meta.textContent = new Date().toLocaleTimeString();

  left.appendChild(name);
  left.appendChild(meta);
  top.appendChild(left);
  row.appendChild(top);

  // Optional second line with free-form detail text.
  if (detail) {
    const d = document.createElement("div");
    d.className = "small";
    d.textContent = detail;
    row.appendChild(d);
  }

  // Append and keep the newest entry scrolled into view.
  timelineEl.appendChild(row);
  timelineEl.scrollTop = timelineEl.scrollHeight;

  // Cap the timeline at 80 rows, dropping the oldest first.
  while (timelineEl.children.length > 80) {
    timelineEl.removeChild(timelineEl.firstChild);
  }
}

// Currently selected tab id; "share" is the initial tab.
let activeTab = "share";

// Activate the tab button whose data-tab matches, and show only the matching
// panel (panels are looked up document-wide by data-panel).
function setTab(tab) {
  activeTab = tab;
  if (tabsEl) {
    const buttons = tabsEl.querySelectorAll(".tab");
    buttons.forEach((btn) => {
      const t = btn.getAttribute("data-tab") || "";
      btn.classList.toggle("active", t === tab);
    });
  }

  const panels = document.querySelectorAll(".panel");
  panels.forEach((panel) => {
    const t = panel.getAttribute("data-panel") || "";
    panel.classList.toggle("hidden", t !== tab);
  });
}

// Read the "token" parameter from the URL hash, which is treated as a query
// string (e.g. "#token=abc&x=y"). Returns "" when absent.
function getTokenFromHash() {
  const raw = (location.hash || "").startsWith("#") ? (location.hash || "").slice(1) : (location.hash || "");
  if (!raw) return "";
  const params = new URLSearchParams(raw);
  return (params.get("token") || "").trim();
}

// Remove only the "token" parameter from the hash, preserving any other hash
// params, without adding a history entry.
function stripHashToken() {
  const raw = (location.hash || "").startsWith("#") ? (location.hash || "").slice(1) : (location.hash || "");
  if (!raw) return;
  const params = new URLSearchParams(raw);
  if (!params.has("token")) return;
  params.delete("token");
  const next = params.toString();
  const url = location.pathname + location.search + (next ?
"#" + next : "");
  history.replaceState(null, "", url);
}

// Resolve the auth token: a token in the URL hash wins (persisted to
// localStorage best-effort, then scrubbed from the URL); otherwise fall back
// to the stored token. localStorage access is wrapped in try/catch because it
// can throw (e.g. storage blocked by the browser).
function readToken() {
  const fromHash = getTokenFromHash();
  if (fromHash) {
    try { localStorage.setItem(STORAGE_TOKEN, fromHash); } catch {}
    stripHashToken();
    return fromHash;
  }
  try {
    return (localStorage.getItem(STORAGE_TOKEN) || "").trim();
  } catch {
    return "";
  }
}

// Extract the workspace id from a "/w/:id/..." pathname; "" when not present.
function parseWorkspaceIdFromPath() {
  const parts = location.pathname.split("/").filter(Boolean);
  const wIndex = parts.indexOf("w");
  if (wIndex !== -1 && parts[wIndex + 1]) return decodeURIComponent(parts[wIndex + 1]);
  return "";
}

// fetch() wrapper: attaches the bearer token, defaults Content-Type to JSON
// for non-FormData bodies, parses the response body as JSON (null on parse
// failure), and on a non-2xx response throws an Error decorated with
// status/code/details taken from the body when available.
async function apiFetch(path, options) {
  const token = readToken();
  const opts = options || {};
  const headers = new Headers(opts.headers || {});
  if (!headers.has("Content-Type") && opts.body && !(opts.body instanceof FormData)) {
    headers.set("Content-Type", "application/json");
  }
  if (token) headers.set("Authorization", "Bearer " + token);
  const res = await fetch(path, { ...opts, headers });
  const text = await res.text();
  let json = null;
  try { json = text ? JSON.parse(text) : null; } catch { json = null; }
  if (!res.ok) {
    // Prefer a structured message/code from the body over raw text/statusText.
    const msg = json && json.message ? json.message : (text || res.statusText);
    const code = json && json.code ? json.code : "request_failed";
    const err = new Error(code + ": " + msg);
    err.status = res.status;
    err.code = code;
    err.details = json && json.details ? json.details : undefined;
    throw err;
  }
  return json;
}

// Write a status line, colored by kind ("bad" = danger, "ok" = success,
// anything else = muted).
function setStatus(msg, kind) {
  statusEl.textContent = msg || "";
  statusEl.style.color = kind === "bad" ? "var(--danger)" : kind === "ok" ?
// Append one chat bubble (role header + text body) and scroll to bottom.
function appendMsg(role, text) {
  const el = document.createElement("div");
  el.className = "msg";
  const meta = document.createElement("div");
  meta.className = "meta";
  meta.textContent = role;
  const content = document.createElement("div");
  content.className = "content";
  content.textContent = text;
  el.appendChild(meta);
  el.appendChild(content);
  chatlog.appendChild(el);
  chatlog.scrollTop = chatlog.scrollHeight;
}

// Re-render the chat log from a message list. Text parts are joined with
// newlines; messages with no text parts fall back to pretty-printed JSON.
function renderMessages(items) {
  chatlog.innerHTML = "";
  if (!Array.isArray(items) || !items.length) {
    appendMsg("system", "No messages yet.");
    return;
  }
  for (const msg of items) {
    const info = msg && msg.info ? msg.info : null;
    const parts = Array.isArray(msg && msg.parts) ? msg.parts : [];
    const role = info && info.role ? info.role : "message";
    const textParts = parts
      .filter((p) => p && p.type === "text" && typeof p.text === "string")
      .map((p) => p.text);
    const body = textParts.length ? textParts.join("\n") : JSON.stringify(parts, null, 2);
    appendMsg(role, body);
  }
}

function sessionKey(workspaceId) {
  return STORAGE_SESSION_PREFIX + workspaceId;
}

// Per-workspace session id persistence (best-effort localStorage).
function readSessionId(workspaceId) {
  try { return (localStorage.getItem(sessionKey(workspaceId)) || "").trim(); } catch { return ""; }
}

function writeSessionId(workspaceId, sessionId) {
  try { localStorage.setItem(sessionKey(workspaceId), sessionId); } catch {}
}

// Ask the proxied opencode config for its default provider/model pair.
// Takes the first entry of the default map; returns null when unavailable.
// NOTE(review): assumes 'default' maps providerID -> modelID — confirm
// against the opencode providers endpoint schema.
async function resolveDefaultModel(workspaceId) {
  try {
    const providers = await apiFetch("/w/" + encodeURIComponent(workspaceId) + "/opencode/config/providers");
    const def = providers && providers.default ? providers.default : null;
    if (def && typeof def === "object") {
      const entries = Object.entries(def);
      if (entries.length) {
        const providerID = entries[0][0];
        const modelID = entries[0][1];
        if (providerID && modelID) return { providerID, modelID };
      }
    }
  } catch {
    // ignore
  }
  return null;
}

// Return the stored session id, or create a fresh session and persist it.
async function ensureSession(workspaceId) {
  const existing = readSessionId(workspaceId);
  if (existing) return existing;
  const created = await apiFetch("/w/" + encodeURIComponent(workspaceId) + "/opencode/session", {
    method: "POST",
    body: JSON.stringify({ title: "OpenWork Toy UI" }),
  });
  const id = created && created.id ? String(created.id) : "";
  if (!id) throw new Error("session_create_failed");
  writeSessionId(workspaceId, id);
  return id;
}

// Populate the host panel (version, sandbox, file injection, scope) and the
// connection pill from /status, /capabilities and /whoami.
async function refreshHost(workspaceId) {
  const token = readToken();
  if (!token) {
    setPill(pillConn, "token missing", "bad");
    setStatus("Add #token=... to the URL fragment", "bad");
    return;
  }
  try {
    const status = await apiFetch("/status");
    const caps = await apiFetch("/capabilities");
    hostIdEl.textContent = location.origin;
    serverVersionEl.textContent = caps && caps.serverVersion ? caps.serverVersion : (status && status.version ? status.version : "-");
    const sandbox = caps && caps.sandbox ? caps.sandbox : null;
    sandboxEl.textContent = sandbox ? (sandbox.backend + " (" + (sandbox.enabled ? "on" : "off") + ")") : "-";
    const files = caps && caps.toolProviders && caps.toolProviders.files ? caps.toolProviders.files : null;
    fileInjectionEl.textContent = files ? ((files.injection ? "upload" : "no upload") + " / " + (files.outbox ? "download" : "no download")) : "-";
    workspaceIdEl.textContent = workspaceId || "-";
    setPill(pillConn, "connected", "ok");
    setStatus("Connected", "ok");

    // Scope is informational only; failure to fetch it is non-fatal.
    try {
      const me = await apiFetch("/whoami");
      const scope = me && me.actor && me.actor.scope ? me.actor.scope : "unknown";
      pillScope.textContent = "scope: " + scope;
    } catch {
      pillScope.textContent = "scope: unknown";
    }
  } catch (e) {
    setPill(pillConn, "disconnected", "bad");
    setStatus(e && e.message ? e.message : "Disconnected", "bad");
  }
}

// Reload the last 50 messages of the current session into the chat log.
async function refreshMessages(workspaceId) {
  const sessionId = readSessionId(workspaceId);
  sessionIdEl.textContent = sessionId ? ("session: " + sessionId) : "session: -";
  if (!sessionId) {
    renderMessages([]);
    return;
  }
  const url = "/w/" + encodeURIComponent(workspaceId) + "/opencode/session/" + encodeURIComponent(sessionId) + "/message?limit=50";
  const msgs = await apiFetch(url);
  renderMessages(msgs);
}

// List workspace artifacts; each row gets a Download button that fetches the
// artifact with auth and saves it via a temporary object URL.
async function listArtifacts(workspaceId) {
  const data = await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/artifacts");
  const items = Array.isArray(data && data.items) ? data.items : [];
  artifactsEl.innerHTML = "";

  if (!items.length) {
    const empty = document.createElement("div");
    empty.className = "item";
    empty.textContent = "No artifacts found.";
    artifactsEl.appendChild(empty);
    return;
  }

  for (const item of items) {
    const row = document.createElement("div");
    row.className = "item";

    const top = document.createElement("div");
    top.className = "row";

    const left = document.createElement("div");
    const name = document.createElement("div");
    name.className = "mono";
    name.textContent = item.path;
    const meta = document.createElement("div");
    meta.className = "small";
    meta.textContent = String(item.size) + " bytes";
    left.appendChild(name);
    left.appendChild(meta);

    const btn = document.createElement("button");
    btn.className = "btn";
    btn.textContent = "Download";
    btn.onclick = async () => {
      try {
        // Plain fetch (not apiFetch): we need the raw bytes, not JSON.
        const res = await fetch(
          "/workspace/" + encodeURIComponent(workspaceId) + "/artifacts/" + encodeURIComponent(item.id),
          { headers: { Authorization: "Bearer " + readToken() } },
        );
        if (!res.ok) throw new Error("download_failed: " + res.status);
        const blob = await res.blob();
        const url = URL.createObjectURL(blob);
        const a = document.createElement("a");
        a.href = url;
        const parts = String(item.path || "artifact").split("/");
        a.download = parts.length ? parts[parts.length - 1] : "artifact";
        a.click();
        // Revoke after the click has had a chance to start the download.
        setTimeout(() => URL.revokeObjectURL(url), 1000);
      } catch (e) {
        setStatus(e && e.message ? e.message : "Download failed", "bad");
      }
    };

    top.appendChild(left);
    top.appendChild(btn);
    row.appendChild(top);
    artifactsEl.appendChild(row);
  }
}
// List pending approvals with Allow/Deny buttons; each reply re-fetches the
// list. Errors render inline instead of through the status line.
async function refreshApprovals() {
  approvalsEl.innerHTML = "";
  try {
    const data = await apiFetch("/approvals");
    const items = Array.isArray(data && data.items) ? data.items : [];
    if (!items.length) {
      const empty = document.createElement("div");
      empty.className = "item";
      empty.textContent = "No pending approvals.";
      approvalsEl.appendChild(empty);
      return;
    }

    for (const item of items) {
      const row = document.createElement("div");
      row.className = "item";

      const top = document.createElement("div");
      top.className = "row";

      const left = document.createElement("div");
      const action = document.createElement("div");
      action.className = "mono";
      action.textContent = item.action;
      const summary = document.createElement("div");
      summary.className = "small";
      summary.textContent = item.summary;
      left.appendChild(action);
      left.appendChild(summary);

      const buttons = document.createElement("div");
      buttons.className = "row";

      const allow = document.createElement("button");
      allow.className = "btn primary";
      allow.textContent = "Allow";

      const deny = document.createElement("button");
      deny.className = "btn danger";
      deny.textContent = "Deny";

      allow.onclick = async () => {
        await apiFetch("/approvals/" + encodeURIComponent(item.id), {
          method: "POST",
          body: JSON.stringify({ reply: "allow" }),
        });
        await refreshApprovals();
      };

      deny.onclick = async () => {
        await apiFetch("/approvals/" + encodeURIComponent(item.id), {
          method: "POST",
          body: JSON.stringify({ reply: "deny" }),
        });
        await refreshApprovals();
      };

      buttons.appendChild(allow);
      buttons.appendChild(deny);

      top.appendChild(left);
      top.appendChild(buttons);
      row.appendChild(top);
      approvalsEl.appendChild(row);
    }
  } catch (e) {
    const warn = document.createElement("div");
    warn.className = "item";
    warn.textContent = e && e.message ? e.message : "Approvals unavailable";
    approvalsEl.appendChild(warn);
  }
}

// Abort controller for the single active SSE connection (null when idle).
let eventsAbort = null;

// Open the workspace event stream and feed parsed events into the timeline.
// Uses fetch + manual SSE framing (blank-line separated "data:" chunks)
// rather than EventSource so the Authorization header can be set.
async function connectSse(workspaceId) {
  if (eventsAbort) return; // already connected
  const controller = new AbortController();
  eventsAbort = controller;
  setStatus("Connecting SSE...", "");
  addCheckpoint("sse.connecting");

  const url = "/w/" + encodeURIComponent(workspaceId) + "/opencode/event";
  const res = await fetch(url, {
    headers: { Authorization: "Bearer " + readToken() },
    signal: controller.signal,
  });

  if (!res.ok || !res.body) {
    eventsAbort = null;
    throw new Error("sse_failed: " + res.status);
  }

  setStatus("SSE connected", "ok");
  addCheckpoint("sse.connected");
  const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
  let buffer = "";

  const pump = async () => {
    while (true) {
      const next = await reader.read();
      if (next.done) break;
      buffer += next.value;
      // Normalise line endings, then split on blank lines between events.
      buffer = buffer.replaceAll("\r\n", "\n").replaceAll("\r", "\n");
      const chunks = buffer.split("\n\n");
      buffer = chunks.pop() || ""; // keep the trailing partial event
      for (const chunk of chunks) {
        const lines = chunk.split("\n");
        const dataLines = [];
        for (const line of lines) {
          if (line.startsWith("data:")) {
            const rest = line.slice(5);
            // Per SSE, a single leading space after "data:" is not payload.
            dataLines.push(rest.startsWith(" ") ? rest.slice(1) : rest);
          }
        }
        if (!dataLines.length) continue;
        const raw = dataLines.join("\n");
        try {
          const event = JSON.parse(raw);
          const payload = event && event.payload ? event.payload : event;
          const type = payload && payload.type ? String(payload.type) : (event && event.type ? String(event.type) : "event");
          addCheckpoint(type, summarizeEvent(payload));
          // Heuristic: terminal-sounding event types flip the run pill back.
          if (type.endsWith(".completed") || type.endsWith(".finished") || type.endsWith(".stopped")) {
            setRun("idle");
          }
          if (payload && payload.type === "message.part.updated") {
            void refreshMessages(workspaceId);
          }
        } catch {
          // ignore malformed event payloads
        }
      }
    }
  };

  pump()
    .catch(() => undefined)
    .finally(() => {
      eventsAbort = null;
      try { reader.releaseLock(); } catch {}
      setStatus("SSE disconnected", "");
      addCheckpoint("sse.disconnected");
      setRun("idle");
    });
}

function stopSse() {
  if (!eventsAbort) return;
  eventsAbort.abort();
  eventsAbort = null;
}

// Render the shareable connect artifact JSON into the connect panel.
function renderConnectArtifact(workspaceId, token, scope) {
  const hostUrl = location.origin;
  const workspaceUrl = hostUrl + "/w/" + encodeURIComponent(workspaceId);
  const payload = {
    kind: "openwork.connect.v1",
    hostUrl: hostUrl,
    workspaceId: workspaceId,
    workspaceUrl: workspaceUrl,
    token: token,
    tokenScope: scope,
    createdAt: Date.now(),
  };
  connectEl.textContent = JSON.stringify(payload, null, 2);
}

// Show a connect artifact for the CURRENT token, labelling it with the
// caller's scope when /whoami answers (defaults to "collaborator").
async function showConnectArtifact(workspaceId) {
  const token = readToken();
  let scope = "collaborator";
  try {
    const me = await apiFetch("/whoami");
    const s = me && me.actor && me.actor.scope ? me.actor.scope : "";
    if (s) scope = s;
  } catch {
    // ignore
  }
  renderConnectArtifact(workspaceId, token, scope);
}

// Mint a NEW share token with the scope/label from the share form and render
// it as a connect artifact.
async function mintShareToken(workspaceId) {
  const scope = shareScopeEl && shareScopeEl.value ? String(shareScopeEl.value) : "collaborator";
  const label = shareLabelEl && shareLabelEl.value ? String(shareLabelEl.value).trim() : "";
  const issued = await apiFetch("/tokens", {
    method: "POST",
    body: JSON.stringify({ scope, label: label || undefined }),
  });
  const token = issued && issued.token ? String(issued.token) : "";
  const tokenScope = issued && issued.scope ? String(issued.scope) : scope;
  if (!token) throw new Error("token_missing");
  renderConnectArtifact(workspaceId, token, tokenScope);
  setStatus("Token minted: " + tokenScope, "ok");
}
// List issued tokens with a Revoke button per row.
async function refreshTokens() {
  if (!tokensEl) return;
  tokensEl.innerHTML = "";
  try {
    const data = await apiFetch("/tokens");
    const items = Array.isArray(data && data.items) ? data.items : [];
    if (!items.length) {
      const empty = document.createElement("div");
      empty.className = "item";
      empty.textContent = "No tokens.";
      tokensEl.appendChild(empty);
      return;
    }
    for (const item of items) {
      const row = document.createElement("div");
      row.className = "item";
      const top = document.createElement("div");
      top.className = "row";

      const left = document.createElement("div");
      const title = document.createElement("div");
      title.className = "mono";
      title.textContent = (item.scope ? String(item.scope) : "token") + " " + (item.id ? String(item.id) : "");
      const meta = document.createElement("div");
      meta.className = "small";
      meta.textContent = item.label ? String(item.label) : "";
      left.appendChild(title);
      if (meta.textContent) left.appendChild(meta);

      const revoke = document.createElement("button");
      revoke.className = "btn danger";
      revoke.textContent = "Revoke";
      revoke.onclick = async () => {
        try {
          await apiFetch("/tokens/" + encodeURIComponent(String(item.id || "")), { method: "DELETE" });
          await refreshTokens();
        } catch (e) {
          setStatus(e && e.message ? e.message : "Revoke failed", "bad");
        }
      };

      top.appendChild(left);
      top.appendChild(revoke);
      row.appendChild(top);
      tokensEl.appendChild(row);
    }
  } catch (e) {
    const warn = document.createElement("div");
    warn.className = "item";
    warn.textContent = e && e.message ? e.message : "Tokens unavailable";
    tokensEl.appendChild(warn);
  }
}

// Fetch the workspace export payload and pretty-print it into the panel.
async function exportWorkspace(workspaceId) {
  if (!exportEl) return;
  exportEl.textContent = "";
  const data = await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/export");
  exportEl.textContent = JSON.stringify(data, null, 2);
}

// Parse the import textarea as JSON and POST it to the import endpoint.
// Throws descriptive errors for a missing or unparsable payload.
async function importWorkspace(workspaceId) {
  if (!importEl) return;
  const raw = (importEl.value || "").trim();
  if (!raw) throw new Error("import_json_missing");
  let payload = null;
  try { payload = JSON.parse(raw); } catch { payload = null; }
  if (!payload) throw new Error("import_json_invalid");
  await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/import", {
    method: "POST",
    body: JSON.stringify(payload),
  });
}

// List skills; only project-scoped skills can be deleted.
async function refreshSkills(workspaceId) {
  if (!skillsEl) return;
  skillsEl.innerHTML = "";
  try {
    const data = await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/skills");
    const items = Array.isArray(data && data.items) ? data.items : [];
    if (!items.length) {
      const empty = document.createElement("div");
      empty.className = "item";
      empty.textContent = "No skills found.";
      skillsEl.appendChild(empty);
      return;
    }
    for (const item of items) {
      const row = document.createElement("div");
      row.className = "item";
      const top = document.createElement("div");
      top.className = "row";
      const left = document.createElement("div");
      const name = document.createElement("div");
      name.className = "mono";
      name.textContent = item.name;
      const meta = document.createElement("div");
      meta.className = "small";
      meta.textContent = item.description || (item.scope ? String(item.scope) : "");
      left.appendChild(name);
      if (meta.textContent) left.appendChild(meta);

      const delBtn = document.createElement("button");
      delBtn.className = "btn danger";
      delBtn.textContent = "Delete";
      // Only project-scoped skills are deletable from this UI.
      delBtn.disabled = item.scope !== "project";
      delBtn.onclick = async () => {
        try {
          await apiFetch(
            "/workspace/" + encodeURIComponent(workspaceId) + "/skills/" + encodeURIComponent(item.name),
            { method: "DELETE" },
          );
          await refreshSkills(workspaceId);
        } catch (e) {
          setStatus(e && e.message ? e.message : "Delete failed", "bad");
        }
      };

      top.appendChild(left);
      top.appendChild(delBtn);
      row.appendChild(top);
      skillsEl.appendChild(row);
    }
  } catch (e) {
    const warn = document.createElement("div");
    warn.className = "item";
    warn.textContent = e && e.message ? e.message : "Skills unavailable";
    skillsEl.appendChild(warn);
  }
}

// List plugins; only config-sourced plugins can be removed.
async function refreshPlugins(workspaceId) {
  if (!pluginsEl) return;
  pluginsEl.innerHTML = "";
  try {
    const data = await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/plugins");
    const items = Array.isArray(data && data.items) ? data.items : [];
    if (!items.length) {
      const empty = document.createElement("div");
      empty.className = "item";
      empty.textContent = "No plugins.";
      pluginsEl.appendChild(empty);
      return;
    }
    for (const item of items) {
      const row = document.createElement("div");
      row.className = "item";
      const top = document.createElement("div");
      top.className = "row";
      const left = document.createElement("div");
      const spec = document.createElement("div");
      spec.className = "mono";
      spec.textContent = item.spec;
      const meta = document.createElement("div");
      meta.className = "small";
      meta.textContent = (item.source ? String(item.source) : "") + (item.scope ? " / " + String(item.scope) : "");
      left.appendChild(spec);
      if (meta.textContent) left.appendChild(meta);

      const delBtn = document.createElement("button");
      delBtn.className = "btn danger";
      delBtn.textContent = "Remove";
      // Only plugins declared in config can be removed here.
      delBtn.disabled = item.source !== "config";
      delBtn.onclick = async () => {
        try {
          await apiFetch(
            "/workspace/" + encodeURIComponent(workspaceId) + "/plugins/" + encodeURIComponent(item.spec),
            { method: "DELETE" },
          );
          await refreshPlugins(workspaceId);
        } catch (e) {
          setStatus(e && e.message ? e.message : "Remove failed", "bad");
        }
      };

      top.appendChild(left);
      top.appendChild(delBtn);
      row.appendChild(top);
      pluginsEl.appendChild(row);
    }
  } catch (e) {
    const warn = document.createElement("div");
    warn.className = "item";
    warn.textContent = e && e.message ? e.message : "Plugins unavailable";
    pluginsEl.appendChild(warn);
  }
}

// Read-only list of configured MCP servers and their enabled state.
async function refreshMcp(workspaceId) {
  if (!mcpEl) return;
  mcpEl.innerHTML = "";
  try {
    const data = await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/mcp");
    const items = Array.isArray(data && data.items) ? data.items : [];
    if (!items.length) {
      const empty = document.createElement("div");
      empty.className = "item";
      empty.textContent = "No MCP servers.";
      mcpEl.appendChild(empty);
      return;
    }
    for (const item of items) {
      const row = document.createElement("div");
      row.className = "item";
      const name = document.createElement("div");
      name.className = "mono";
      name.textContent = item.name;
      const meta = document.createElement("div");
      meta.className = "small";
      meta.textContent = item.disabledByTools ? "disabled" : "enabled";
      row.appendChild(name);
      row.appendChild(meta);
      mcpEl.appendChild(row);
    }
  } catch (e) {
    const warn = document.createElement("div");
    warn.className = "item";
    warn.textContent = e && e.message ? e.message : "MCP unavailable";
    mcpEl.appendChild(warn);
  }
}
// Copy the rendered connect artifact to the clipboard (no-op when empty).
async function copyConnectArtifact() {
  const text = connectEl.textContent || "";
  if (!text.trim()) return;
  try {
    await navigator.clipboard.writeText(text);
    setStatus("Copied", "ok");
  } catch {
    setStatus("Clipboard unavailable", "bad");
  }
}

// Entry point: resolve the workspace from the URL (redirecting to the active
// workspace when none is present), populate the panels, then wire up every
// button. Assumes all #btn-* elements exist in the served HTML.
async function main() {
  const workspaceId = parseWorkspaceIdFromPath();
  if (!workspaceId) {
    const token = readToken();
    if (!token) {
      appendMsg("system", "Open this as /ui#token= or /w//ui#token=");
      return;
    }
    // No workspace in the path: redirect to the active (or first) workspace.
    try {
      const workspaces = await apiFetch("/workspaces");
      const active = (workspaces && workspaces.activeId) || (workspaces && workspaces.items && workspaces.items[0] && workspaces.items[0].id) || "";
      if (active) {
        location.href = "/w/" + encodeURIComponent(active) + "/ui";
        return;
      }
    } catch {
      // ignore
    }
    appendMsg("system", "No workspace configured.");
    return;
  }

  setRun("idle");
  clearTimeline();
  if (workspaceUrlEl) {
    const wsUrl = location.origin + "/w/" + encodeURIComponent(workspaceId);
    workspaceUrlEl.textContent = wsUrl;
    workspaceUrlEl.href = wsUrl;
  }

  await refreshHost(workspaceId);
  sessionIdEl.textContent = readSessionId(workspaceId) ? ("session: " + readSessionId(workspaceId)) : "session: -";
  await refreshMessages(workspaceId).catch(() => undefined);

  // Tab bar: switching tabs lazily refreshes that tab's data.
  setTab(activeTab);
  if (tabsEl) {
    const buttons = tabsEl.querySelectorAll(".tab");
    buttons.forEach((btn) => {
      btn.onclick = async () => {
        const tab = btn.getAttribute("data-tab") || "share";
        setTab(tab);
        try {
          if (tab === "skills") await refreshSkills(workspaceId);
          if (tab === "plugins") await refreshPlugins(workspaceId);
          if (tab === "apps") await refreshMcp(workspaceId);
          if (tab === "share") await refreshTokens().catch(() => undefined);
        } catch {
          // ignore
        }
      };
    });
  }

  // Clear the stored session id, then create and show a fresh session.
  qs("#btn-new").onclick = async () => {
    try {
      writeSessionId(workspaceId, "");
      const id = await ensureSession(workspaceId);
      sessionIdEl.textContent = "session: " + id;
      await refreshMessages(workspaceId);
    } catch (e) {
      setStatus(e && e.message ? e.message : "Failed to create session", "bad");
    }
  };

  qs("#btn-refresh").onclick = async () => {
    await refreshMessages(workspaceId).catch((e) => setStatus(e && e.message ? e.message : "refresh failed", "bad"));
  };

  // Delete the current session server-side, then reset all local UI state.
  qs("#btn-delete-session").onclick = async () => {
    const sessionId = readSessionId(workspaceId);
    if (!sessionId) {
      setStatus("No session selected", "bad");
      return;
    }
    if (!confirm("Delete this session? This cannot be undone.")) return;
    try {
      await apiFetch(
        "/workspace/" + encodeURIComponent(workspaceId) + "/sessions/" + encodeURIComponent(sessionId),
        { method: "DELETE" },
      );
      writeSessionId(workspaceId, "");
      sessionIdEl.textContent = "session: -";
      chatlog.innerHTML = "";
      clearTimeline();
      setRun("idle");
      setStatus("Session deleted", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "delete failed", "bad");
    }
  };

  // Send the prompt asynchronously; progress arrives over SSE.
  qs("#btn-send").onclick = async () => {
    const text = (promptEl.value || "").trim();
    if (!text) return;
    clearTimeline();
    addCheckpoint("prompt.submitted", text.length > 120 ? (text.slice(0, 120) + "...") : text);
    setRun("running");
    void connectSse(workspaceId).catch(() => undefined);
    appendMsg("user", text);
    promptEl.value = "";
    try {
      const sessionId = await ensureSession(workspaceId);
      sessionIdEl.textContent = "session: " + sessionId;
      const model = await resolveDefaultModel(workspaceId);
      const body = { parts: [{ type: "text", text: text }] };
      if (model) body.model = model;
      await apiFetch(
        "/w/" + encodeURIComponent(workspaceId) + "/opencode/session/" + encodeURIComponent(sessionId) + "/prompt_async",
        { method: "POST", body: JSON.stringify(body) },
      );
      setStatus("Prompt accepted", "ok");
      addCheckpoint("prompt.accepted");
      await refreshMessages(workspaceId).catch(() => undefined);
    } catch (e) {
      setStatus(e && e.message ? e.message : "Prompt failed", "bad");
      addCheckpoint("prompt.failed", e && e.message ? e.message : "Prompt failed");
      setRun("idle");
    }
  };

  // Append a canned "turn this into a skill" template to the prompt box.
  qs("#btn-skill").onclick = () => {
    const template = [
      "Turn this into a skill.",
      "",
      "Requirements:",
      "- Skill name: my-skill",
      "- Write to .opencode/skills/my-skill/SKILL.md",
      "- Include usage, inputs, steps, and examples",
      "",
      "Use the most recent conversation as source material.",
    ].join("\n");
    const existing = (promptEl.value || "").trim();
    promptEl.value = existing ? (existing + "\n\n" + template) : template;
    promptEl.focus();
  };

  qs("#btn-mint").onclick = async () => {
    try {
      await mintShareToken(workspaceId);
    } catch (e) {
      setStatus(e && e.message ? e.message : "Token mint failed", "bad");
    }
  };

  qs("#btn-deploy").onclick = () => {
    setStatus("Deploy (Beta) is not implemented in the Toy UI yet", "");
  };

  qs("#btn-events").onclick = async () => {
    try {
      await connectSse(workspaceId);
    } catch (e) {
      setStatus(e && e.message ? e.message : "SSE failed", "bad");
    }
  };

  qs("#btn-events-stop").onclick = () => stopSse();

  // Upload the picked file to the workspace inbox as multipart form data.
  qs("#btn-upload").onclick = async () => {
    const input = qs("#file");
    const file = input && input.files && input.files[0] ? input.files[0] : null;
    if (!file) {
      setStatus("Pick a file first", "bad");
      return;
    }
    try {
      const form = new FormData();
      form.set("file", file);
      await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/inbox", { method: "POST", body: form });
      setStatus("Uploaded", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "Upload failed", "bad");
    }
  };

  qs("#btn-artifacts").onclick = async () => {
    await listArtifacts(workspaceId).catch((e) => setStatus(e && e.message ? e.message : "artifacts failed", "bad"));
  };

  qs("#btn-approvals").onclick = async () => {
    await refreshApprovals().catch(() => undefined);
  };

  qs("#btn-share").onclick = async () => {
    await showConnectArtifact(workspaceId).catch(() => undefined);
  };

  qs("#btn-copy").onclick = async () => {
    await copyConnectArtifact();
  };

  qs("#btn-tokens").onclick = async () => {
    await refreshTokens().catch((e) => setStatus(e && e.message ? e.message : "tokens failed", "bad"));
  };

  qs("#btn-export").onclick = async () => {
    try {
      await exportWorkspace(workspaceId);
      setStatus("Exported", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "export failed", "bad");
    }
  };

  qs("#btn-import").onclick = async () => {
    try {
      await importWorkspace(workspaceId);
      setStatus("Import requested (check approvals)", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "import failed", "bad");
    }
  };

  qs("#btn-delete-workspace").onclick = async () => {
    if (!confirm("Delete this workspace from the host's OpenWork server config?")) return;
    try {
      await apiFetch("/workspaces/" + encodeURIComponent(workspaceId), { method: "DELETE" });
      setStatus("Workspace deleted (refresh workspaces)", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "workspace delete failed", "bad");
    }
  };

  qs("#btn-skills-refresh").onclick = async () => {
    await refreshSkills(workspaceId).catch((e) => setStatus(e && e.message ? e.message : "skills failed", "bad"));
  };

  qs("#btn-plugins-refresh").onclick = async () => {
    await refreshPlugins(workspaceId).catch((e) => setStatus(e && e.message ? e.message : "plugins failed", "bad"));
  };

  qs("#btn-plugin-add").onclick = async () => {
    const spec = pluginSpecEl && pluginSpecEl.value ? String(pluginSpecEl.value).trim() : "";
    if (!spec) {
      setStatus("plugin spec required", "bad");
      return;
    }
    try {
      await apiFetch("/workspace/" + encodeURIComponent(workspaceId) + "/plugins", {
        method: "POST",
        body: JSON.stringify({ spec }),
      });
      if (pluginSpecEl) pluginSpecEl.value = "";
      await refreshPlugins(workspaceId);
      setStatus("Plugin added", "ok");
    } catch (e) {
      setStatus(e && e.message ? e.message : "plugin add failed", "bad");
    }
  };

  qs("#btn-mcp-refresh").onclick = async () => {
    await refreshMcp(workspaceId).catch((e) => setStatus(e && e.message ? e.message : "mcp failed", "bad"));
  };
}

main().catch((e) => {
  setStatus(e && e.message ? e.message : "Startup failed", "bad");
});
`;

// Wrap an HTML payload; Cache-Control: no-store so the toy UI never caches.
export function htmlResponse(body: string, status = 200): Response {
  return new Response(body, {
    status,
    headers: {
      "Content-Type": "text/html; charset=utf-8",
      "Cache-Control": "no-store",
    },
  });
}

export function cssResponse(body: string): Response {
  return new Response(body, {
    status: 200,
    headers: {
      "Content-Type": "text/css; charset=utf-8",
      "Cache-Control": "no-store",
    },
  });
}

export function jsResponse(body: string): Response {
  return new Response(body, {
    status: 200,
    headers: {
      "Content-Type": "text/javascript; charset=utf-8",
      "Cache-Control": "no-store",
    },
  });
}

export function svgResponse(body: string): Response {
  return new Response(body, {
    status: 200,
    headers: {
      "Content-Type": "image/svg+xml; charset=utf-8",
      "Cache-Control": "no-store",
    },
  });
}
diff --git a/apps/server/src/types.ts b/apps/server/src/types.ts
new file mode 100644
index 0000000000..f737b30c8f
--- /dev/null
+++ b/apps/server/src/types.ts
@@ -0,0 +1,228 @@
// Shared type definitions for the OpenWork server.

export type WorkspaceType = "local" | "remote";

export type RemoteType = "opencode" | "openwork";

export type ApprovalMode = "manual" | "auto";

export type TokenScope = "owner" | "collaborator" | "viewer";

export type SandboxBackend = "none" | "docker" | "container";

export type ProviderPlacement = "in-sandbox" | "host-machine" | "client-machine" | "external";

export type LogFormat = "pretty" | "json";

// Raw on-disk workspace entry; almost everything is optional because older
// configs may omit fields. `path` is the only required property.
export interface WorkspaceConfig {
  id?: string;
  path: string;
  name?: string;
  preset?: string;
  workspaceType?: WorkspaceType;
  remoteType?: RemoteType;
  baseUrl?: string;
  directory?: string;
  displayName?: string;
  openworkHostUrl?: string;
  openworkToken?: string;
  openworkWorkspaceId?: string;
  openworkWorkspaceName?: string;
  sandboxBackend?: string;
  sandboxRunId?: string;
  sandboxContainerName?: string;
  opencodeUsername?: string;
  opencodePassword?: string;
}

// Resolved workspace view: the required core fields are filled in, and the
// opencode connection details are additionally grouped under `opencode`.
export interface WorkspaceInfo {
  id: string;
  name: string;
  path: string;
  preset: string;
  workspaceType: WorkspaceType;
  remoteType?: RemoteType;
  baseUrl?: string;
  directory?: string;
  displayName?: string;
  openworkHostUrl?: string;
  openworkToken?: string;
  openworkWorkspaceId?: string;
  openworkWorkspaceName?: string;
  sandboxBackend?: string;
  sandboxRunId?: string;
  sandboxContainerName?: string;
  opencodeUsername?: string;
  opencodePassword?: string;
  opencode?: {
    baseUrl?: string;
    directory?: string;
    username?: string;
    password?: string;
  };
}
interface WorkspaceInfo { + id: string; + name: string; + path: string; + preset: string; + workspaceType: WorkspaceType; + remoteType?: RemoteType; + baseUrl?: string; + directory?: string; + displayName?: string; + openworkHostUrl?: string; + openworkToken?: string; + openworkWorkspaceId?: string; + openworkWorkspaceName?: string; + sandboxBackend?: string; + sandboxRunId?: string; + sandboxContainerName?: string; + opencodeUsername?: string; + opencodePassword?: string; + opencode?: { + baseUrl?: string; + directory?: string; + username?: string; + password?: string; + }; +} + +export interface OpencodeConfigFile { + path: string; + exists: boolean; + content: string | null; +} + +export interface ApprovalConfig { + mode: ApprovalMode; + timeoutMs: number; +} + +export interface ServerConfig { + host: string; + port: number; + token: string; + hostToken: string; + configPath?: string; + opencodeBaseUrl?: string; + opencodeDirectory?: string; + opencodeUsername?: string; + opencodePassword?: string; + approval: ApprovalConfig; + corsOrigins: string[]; + workspaces: WorkspaceInfo[]; + authorizedRoots: string[]; + readOnly: boolean; + startedAt: number; + tokenSource: "cli" | "env" | "file" | "generated"; + hostTokenSource: "cli" | "env" | "file" | "generated"; + logFormat: LogFormat; + logRequests: boolean; +} + +export interface Capabilities { + schemaVersion: number; + serverVersion: string; + skills: { read: boolean; write: boolean; source: "openwork" | "opencode" }; + hub: { + skills: { + read: boolean; + install: boolean; + repo: { owner: string; name: string; ref: string }; + }; + }; + plugins: { read: boolean; write: boolean }; + mcp: { read: boolean; write: boolean }; + commands: { read: boolean; write: boolean }; + config: { read: boolean; write: boolean }; + + approvals: { mode: ApprovalMode; timeoutMs: number }; + sandbox: { enabled: boolean; backend: SandboxBackend }; + ui: { toy: boolean }; + tokens: { scoped: boolean; scopes: TokenScope[] }; + proxy: 
{ + opencode: boolean; + }; + toolProviders: { + browser: { + enabled: boolean; + placement: ProviderPlacement; + mode: "none" | "headless" | "interactive"; + }; + files: { + injection: boolean; + outbox: boolean; + inboxPath: string; + outboxPath: string; + maxBytes: number; + }; + }; +} + +export type ReloadReason = "plugins" | "skills" | "mcp" | "config" | "agents" | "commands"; + +export type ReloadTrigger = { + type: "skill" | "plugin" | "config" | "mcp" | "agent" | "command"; + name?: string; + action?: "added" | "removed" | "updated"; + path?: string; +}; + +export interface ReloadEvent { + id: string; + seq: number; + workspaceId: string; + reason: ReloadReason; + trigger?: ReloadTrigger; + timestamp: number; +} + +export interface ApiErrorBody { + code: string; + message: string; + details?: unknown; +} + +export interface PluginItem { + spec: string; + source: "config" | "dir.project" | "dir.global"; + scope: "project" | "global"; + path?: string; +} + +export interface McpItem { + name: string; + config: Record; + source: "config.project" | "config.global" | "config.remote"; + disabledByTools?: boolean; +} + +export interface SkillItem { + name: string; + path: string; + description: string; + scope: "project" | "global"; + trigger?: string; +} + +export interface HubSkillItem { + name: string; + description: string; + trigger?: string; + source: { + owner: string; + repo: string; + ref: string; + path: string; + }; +} + +export interface CommandItem { + name: string; + description?: string; + template: string; + agent?: string; + model?: string | null; + subtask?: boolean; + scope: "workspace" | "global"; +} + +export interface Actor { + type: "remote" | "host"; + clientId?: string; + tokenHash?: string; + scope?: TokenScope; +} + +export interface ApprovalRequest { + id: string; + workspaceId: string; + action: string; + summary: string; + paths: string[]; + createdAt: number; + actor: Actor; +} + +export interface AuditEntry { + id: string; + 
workspaceId: string; + actor: Actor; + action: string; + target: string; + summary: string; + timestamp: number; +} diff --git a/apps/server/src/utils.test.ts b/apps/server/src/utils.test.ts new file mode 100644 index 0000000000..65167a6e52 --- /dev/null +++ b/apps/server/src/utils.test.ts @@ -0,0 +1,65 @@ +import { describe, expect, test } from "bun:test"; +import { hashToken, shortId, parseList, ensureDir, exists } from "./utils.js"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +describe("hashToken", () => { + test("returns consistent hash for same input", () => { + const a = hashToken("my-secret-token"); + const b = hashToken("my-secret-token"); + expect(a).toBe(b); + }); + + test("returns different hashes for different inputs", () => { + const a = hashToken("token-a"); + const b = hashToken("token-b"); + expect(a).not.toBe(b); + }); + + test("returns a hex string", () => { + const hash = hashToken("test"); + expect(hash).toMatch(/^[0-9a-f]+$/); + }); +}); + +describe("shortId", () => { + test("returns a non-empty string", () => { + const id = shortId(); + expect(id.length).toBeGreaterThan(0); + }); + + test("returns unique values", () => { + const ids = new Set(Array.from({ length: 100 }, () => shortId())); + expect(ids.size).toBe(100); + }); +}); + +describe("parseList", () => { + test("splits comma-separated values", () => { + expect(parseList("a,b,c")).toEqual(["a", "b", "c"]); + }); + + test("trims whitespace", () => { + expect(parseList(" a , b , c ")).toEqual(["a", "b", "c"]); + }); + + test("filters empty entries", () => { + expect(parseList("a,,b,")).toEqual(["a", "b"]); + }); + + test("returns empty array for falsy input", () => { + expect(parseList(undefined)).toEqual([]); + expect(parseList("")).toEqual([]); + }); +}); + +describe("ensureDir + exists", () => { + test("creates nested directory and reports it exists", async () => { + const dir = join(tmpdir(), 
`openwork-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + const nested = join(dir, "a", "b", "c"); + + expect(await exists(nested)).toBe(false); + await ensureDir(nested); + expect(await exists(nested)).toBe(true); + }); +}); diff --git a/apps/server/src/utils.ts b/apps/server/src/utils.ts new file mode 100644 index 0000000000..cadd95fa89 --- /dev/null +++ b/apps/server/src/utils.ts @@ -0,0 +1,52 @@ +import { createHash, randomUUID } from "node:crypto"; +import { mkdir, readFile, stat } from "node:fs/promises"; + +export async function exists(path: string): Promise { + try { + await stat(path); + return true; + } catch { + return false; + } +} + +export async function ensureDir(path: string): Promise { + await mkdir(path, { recursive: true }); +} + +export async function readJsonFile(path: string): Promise { + try { + const raw = await readFile(path, "utf8"); + return JSON.parse(raw) as T; + } catch { + return null; + } +} + +export function hashToken(token: string): string { + return createHash("sha256").update(token).digest("hex"); +} + +export function shortId(): string { + return randomUUID(); +} + +export function parseList(input: string | undefined): string[] { + if (!input) return []; + const trimmed = input.trim(); + if (!trimmed) return []; + if (trimmed.startsWith("[")) { + try { + const parsed = JSON.parse(trimmed); + if (Array.isArray(parsed)) { + return parsed.map((item) => String(item)).filter(Boolean); + } + } catch { + return []; + } + } + return trimmed + .split(/[,;]/) + .map((value) => value.trim()) + .filter(Boolean); +} diff --git a/apps/server/src/validators.test.ts b/apps/server/src/validators.test.ts new file mode 100644 index 0000000000..2da35b644b --- /dev/null +++ b/apps/server/src/validators.test.ts @@ -0,0 +1,139 @@ +import { describe, expect, test } from "bun:test"; +import { + sanitizeCommandName, + validateCommandName, + validateMcpName, + validateSkillName, + validateMcpConfig, +} from "./validators.js"; + 
+describe("sanitizeCommandName", () => { + test("passes through valid names", () => { + expect(sanitizeCommandName("my-command")).toBe("my-command"); + expect(sanitizeCommandName("deploy")).toBe("deploy"); + }); + + test("trims whitespace", () => { + expect(sanitizeCommandName(" hello ")).toBe("hello"); + }); + + test("strips leading slashes", () => { + expect(sanitizeCommandName("/deploy")).toBe("deploy"); + expect(sanitizeCommandName("///deploy")).toBe("deploy"); + }); +}); + +describe("validateCommandName", () => { + test("accepts valid alphanumeric names", () => { + expect(() => validateCommandName("deploy")).not.toThrow(); + expect(() => validateCommandName("my-cmd")).not.toThrow(); + expect(() => validateCommandName("a_b_c")).not.toThrow(); + expect(() => validateCommandName("Cmd123")).not.toThrow(); + }); + + test("rejects empty string", () => { + expect(() => validateCommandName("")).toThrow(); + }); + + test("rejects names with slashes", () => { + expect(() => validateCommandName("foo/bar")).toThrow(); + expect(() => validateCommandName("foo\\bar")).toThrow(); + }); + + test("rejects names with dots", () => { + expect(() => validateCommandName("foo.bar")).toThrow(); + }); + + test("rejects names with spaces", () => { + expect(() => validateCommandName("foo bar")).toThrow(); + }); +}); + +describe("validateMcpName", () => { + test("accepts valid names", () => { + expect(() => validateMcpName("my-server")).not.toThrow(); + expect(() => validateMcpName("notion")).not.toThrow(); + expect(() => validateMcpName("a_b")).not.toThrow(); + }); + + test("rejects empty strings", () => { + expect(() => validateMcpName("")).toThrow(); + }); + + test("rejects names starting with dash", () => { + expect(() => validateMcpName("-bad")).toThrow(); + }); + + test("rejects names with special characters", () => { + expect(() => validateMcpName("foo.bar")).toThrow(); + expect(() => validateMcpName("foo bar")).toThrow(); + expect(() => validateMcpName("foo/bar")).toThrow(); + }); 
+}); + +describe("validateSkillName", () => { + test("accepts kebab-case names", () => { + expect(() => validateSkillName("my-skill")).not.toThrow(); + expect(() => validateSkillName("skill123")).not.toThrow(); + expect(() => validateSkillName("a")).not.toThrow(); + }); + + test("rejects empty strings", () => { + expect(() => validateSkillName("")).toThrow(); + }); + + test("rejects uppercase", () => { + expect(() => validateSkillName("MySkill")).toThrow(); + }); + + test("rejects underscores", () => { + expect(() => validateSkillName("my_skill")).toThrow(); + }); + + test("rejects names over 64 chars", () => { + expect(() => validateSkillName("a".repeat(65))).toThrow(); + }); +}); + +describe("validateMcpConfig", () => { + test("accepts valid remote config", () => { + expect(() => + validateMcpConfig({ type: "remote", url: "https://example.com" }), + ).not.toThrow(); + expect(() => + validateMcpConfig({ type: "remote", url: "http://localhost:8080/mcp" }), + ).not.toThrow(); + }); + + test("accepts valid local config", () => { + expect(() => + validateMcpConfig({ type: "local", command: ["npx", "my-server"] }), + ).not.toThrow(); + }); + + test("rejects unknown type", () => { + expect(() => validateMcpConfig({ type: "unknown" })).toThrow(); + }); + + test("rejects remote without url", () => { + expect(() => validateMcpConfig({ type: "remote" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: "" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: " " })).toThrow(); + }); + + test("rejects remote with invalid or non-http url", () => { + expect(() => validateMcpConfig({ type: "remote", url: "notaurl" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: "https:example.com" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: " https://example.com" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: "https://example.com " })).toThrow(); + expect(() => validateMcpConfig({ type: 
"remote", url: "file:///tmp/mcp" })).toThrow(); + expect(() => validateMcpConfig({ type: "remote", url: "javascript:alert(1)" })).toThrow(); + }); + + test("rejects local without command", () => { + expect(() => validateMcpConfig({ type: "local" })).toThrow(); + expect(() => validateMcpConfig({ type: "local", command: [] })).toThrow(); + expect(() => validateMcpConfig({ type: "local", command: ["", "foo"] })).toThrow(); + expect(() => validateMcpConfig({ type: "local", command: ["npx", 12] })).toThrow(); + }); +}); diff --git a/apps/server/src/validators.ts b/apps/server/src/validators.ts new file mode 100644 index 0000000000..bb6d8459f8 --- /dev/null +++ b/apps/server/src/validators.ts @@ -0,0 +1,79 @@ +import { ApiError } from "./errors.js"; + +const SKILL_NAME_REGEX = /^[a-z0-9]+(-[a-z0-9]+)*$/; +const COMMAND_NAME_REGEX = /^[A-Za-z0-9_-]+$/; +const MCP_NAME_REGEX = /^[A-Za-z0-9_-]+$/; + +export function validateSkillName(name: string): void { + if (!name || name.length < 1 || name.length > 64 || !SKILL_NAME_REGEX.test(name)) { + throw new ApiError(400, "invalid_skill_name", "Skill name must be kebab-case (1-64 chars)"); + } +} + +export function validateDescription(description: string | undefined): void { + if (!description || description.length < 1 || description.length > 1024) { + throw new ApiError(422, "invalid_description", "Description must be 1-1024 characters"); + } +} + +export function validatePluginSpec(spec: string): void { + if (!spec || spec.trim().length === 0) { + throw new ApiError(400, "invalid_plugin_spec", "Plugin spec is required"); + } +} + +export function sanitizeCommandName(name: string): string { + const trimmed = name.trim().replace(/^\/+/, ""); + return trimmed; +} + +export function validateCommandName(name: string): void { + if (!name || !COMMAND_NAME_REGEX.test(name)) { + throw new ApiError(400, "invalid_command_name", "Command name must be alphanumeric with _ or -"); + } +} + +export function validateMcpName(name: string): void { 
+ if (!name || name.startsWith("-") || !MCP_NAME_REGEX.test(name)) { + throw new ApiError(400, "invalid_mcp_name", "MCP name must be alphanumeric and not start with -"); + } +} + +export function validateMcpConfig(config: Record): void { + const type = config.type; + if (type !== "local" && type !== "remote") { + throw new ApiError(400, "invalid_mcp_config", "MCP config type must be local or remote"); + } + if (type === "local") { + const command = config.command; + if ( + !Array.isArray(command) || + command.length === 0 || + command.some((part) => typeof part !== "string" || part.trim().length === 0) + ) { + throw new ApiError(400, "invalid_mcp_config", "Local MCP requires command array"); + } + } + if (type === "remote") { + const url = config.url; + if (!url || typeof url !== "string" || url.trim().length === 0) { + throw new ApiError(400, "invalid_mcp_config", "Remote MCP requires url"); + } + const normalizedUrl = url.trim(); + if (url !== normalizedUrl) { + throw new ApiError(400, "invalid_mcp_config", "Remote MCP url must not include surrounding whitespace"); + } + if (!/^https?:\/\//i.test(normalizedUrl)) { + throw new ApiError(400, "invalid_mcp_config", "Remote MCP url must start with http(s)://"); + } + try { + const parsed = new URL(normalizedUrl); + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + throw new ApiError(400, "invalid_mcp_config", "Remote MCP url must use http(s)"); + } + } catch (error) { + if (error instanceof ApiError) throw error; + throw new ApiError(400, "invalid_mcp_config", "Remote MCP requires a valid url"); + } + } +} diff --git a/apps/server/src/workspace-activate.e2e.test.ts b/apps/server/src/workspace-activate.e2e.test.ts new file mode 100644 index 0000000000..b9376e8002 --- /dev/null +++ b/apps/server/src/workspace-activate.e2e.test.ts @@ -0,0 +1,115 @@ +import { afterEach, describe, expect, test } from "bun:test"; +import { mkdtemp, mkdir, rm } from "node:fs/promises"; +import { tmpdir } from "node:os"; 
+import { join } from "node:path"; + +import { startServer } from "./server.js"; +import type { ServerConfig } from "./types.js"; + +type Served = { + port: number; + stop: (closeActiveConnections?: boolean) => void | Promise; +}; + +const stops: Array<() => void | Promise> = []; +const roots: string[] = []; + +afterEach(async () => { + while (stops.length) { + await stops.pop()?.(); + } + while (roots.length) { + await rm(roots.pop()!, { recursive: true, force: true }); + } +}); + +async function createWorkspaceRoot() { + const root = await mkdtemp(join(tmpdir(), "openwork-activate-")); + await mkdir(join(root, ".opencode"), { recursive: true }); + roots.push(root); + return root; +} + +function hostAuth(token: string) { + return { "X-OpenWork-Host-Token": token }; +} + +function startMockOpencode() { + const requests: Array<{ pathname: string; search: string }> = []; + const server = Bun.serve({ + hostname: "127.0.0.1", + port: 0, + fetch(request) { + const url = new URL(request.url); + requests.push({ pathname: url.pathname, search: url.search }); + + if (url.pathname === "/instance/dispose") { + return Response.json({ disposed: true }); + } + + return Response.json({ code: "not_found", message: "Not found" }, { status: 404 }); + }, + }) as Served; + stops.push(() => server.stop(true)); + return { server, requests }; +} + +function startOpenworkServer(input: { workspaceRoot: string; opencodeBaseUrl: string }) { + const config: ServerConfig = { + host: "127.0.0.1", + port: 0, + token: "owt_test_token", + hostToken: "owt_host_token", + approval: { mode: "auto", timeoutMs: 1000 }, + corsOrigins: ["*"], + workspaces: [ + { + id: "ws_1", + name: "Workspace", + path: input.workspaceRoot, + preset: "starter", + workspaceType: "local", + baseUrl: input.opencodeBaseUrl, + }, + ], + authorizedRoots: [input.workspaceRoot], + readOnly: false, + startedAt: Date.now(), + tokenSource: "cli", + hostTokenSource: "cli", + logFormat: "pretty", + logRequests: false, + }; + const 
server = startServer(config) as Served; + stops.push(() => server.stop(true)); + return { server, hostToken: config.hostToken }; +} + +describe("workspace activation", () => { + test("reloads the bound OpenCode engine on activate", async () => { + const workspaceRoot = await createWorkspaceRoot(); + const mock = startMockOpencode(); + const openwork = startOpenworkServer({ + workspaceRoot, + opencodeBaseUrl: `http://127.0.0.1:${mock.server.port}`, + }); + + const base = `http://127.0.0.1:${openwork.server.port}`; + const response = await fetch(`${base}/workspaces/ws_1/activate`, { + method: "POST", + headers: hostAuth(openwork.hostToken), + }); + + expect(response.status).toBe(200); + const body = await response.json(); + expect(body.activeId).toBe("ws_1"); + + const reloadRequest = mock.requests.find( + (request) => request.pathname === "/instance/dispose", + ); + expect(reloadRequest).toBeDefined(); + expect(reloadRequest?.search).toContain( + `directory=${encodeURIComponent(workspaceRoot)}`, + ); + }); +}); diff --git a/apps/server/src/workspace-export-safety.test.ts b/apps/server/src/workspace-export-safety.test.ts new file mode 100644 index 0000000000..0ebf64b487 --- /dev/null +++ b/apps/server/src/workspace-export-safety.test.ts @@ -0,0 +1,116 @@ +import { describe, expect, test } from "bun:test"; + +import { + collectWorkspaceExportWarnings, + stripSensitiveWorkspaceExportData, +} from "./workspace-export-safety.js"; + +describe("workspace export safety", () => { + test("does not warn for benign mcp, plugin, and portable files", () => { + const warnings = collectWorkspaceExportWarnings({ + opencode: { + mcp: { jira: { type: "remote", url: "https://jira.example.com/mcp", key: "primary" } }, + plugin: { demo: { key: "theme-dark" } }, + }, + files: [ + { path: ".opencode/plugins/demo/index.ts", content: "const key = 'primary'; export default { enabled: true }" }, + { path: ".opencode/tools/run.ts", content: "console.log('hello')" }, + ], + }); + + 
expect(warnings).toEqual([]); + }); + + test("warns only when secret-like keys or values are present", () => { + const warnings = collectWorkspaceExportWarnings({ + opencode: { + mcp: { + jira: { + headers: { Authorization: "Bearer abcdefghijklmnop" }, + apiKey: "super-secret-key", + }, + }, + plugin: { + demo: { token: "ghp_1234567890abcdef", enabled: true, key: "AbCDef1234567890+/token" }, + }, + }, + files: [ + { path: ".opencode/plugins/demo/index.ts", content: "const apiKey = 'abc123456789';" }, + { + path: ".opencode/tools/run.ts", + content: 'const key = "AbCdEf1234567890+/token"; fetch("https://example.com/path/with/a/really/long/url/that/looks/suspicious/123456789")', + }, + ], + }); + + expect(warnings.map((warning) => warning.id)).toEqual([ + "mcp-config", + "plugin-config", + "portable-file:.opencode/plugins/demo/index.ts", + "portable-file:.opencode/tools/run.ts", + ]); + expect(warnings[0]?.detail).toContain("apiKey"); + expect(warnings[0]?.detail).toContain("Bearer"); + expect(warnings[1]?.detail).toContain("token"); + expect(warnings[1]?.detail).toContain("key"); + expect(warnings[2]?.detail).toContain("apiKey"); + expect(warnings[3]?.detail).toContain("key"); + expect(warnings[3]?.detail).toContain("long URL"); + }); + + test("warns when a non-portable provider section still contains secrets", () => { + const warnings = collectWorkspaceExportWarnings({ + opencode: { + provider: { + openai: { + options: { + apiKey: "sk_live_1234567890abcdef", + }, + }, + }, + mcp: { jira: { type: "remote", url: "https://jira.example.com/mcp" } }, + }, + files: [], + }); + + expect(warnings.map((warning) => warning.id)).toEqual(["provider-config"]); + expect(warnings[0]?.label).toBe("Provider settings"); + expect(warnings[0]?.detail).toContain("apiKey"); + }); + + test("exclude mode removes only flagged values and files", () => { + const sanitized = stripSensitiveWorkspaceExportData({ + opencode: { + plugin: { + demo: { + enabled: true, + token: 
"ghp_1234567890abcdef", + }, + }, + mcp: { + jira: { + enabled: true, + apiKey: "super-secret-key", + url: "https://jira.example.com/mcp", + }, + }, + command: { review: { template: "Review it" } }, + }, + files: [ + { path: ".opencode/plugins/demo/index.ts", content: "const token = 'secret';" }, + { path: ".opencode/tools/run.ts", content: "console.log('safe tool');" }, + { path: ".opencode/agents/reviewer.md", content: "agent" }, + ], + }); + + expect(sanitized.opencode).toEqual({ + plugin: { demo: { enabled: true } }, + mcp: { jira: { enabled: true, url: "https://jira.example.com/mcp" } }, + command: { review: { template: "Review it" } }, + }); + expect(sanitized.files).toEqual([ + { path: ".opencode/tools/run.ts", content: "console.log('safe tool');" }, + { path: ".opencode/agents/reviewer.md", content: "agent" }, + ]); + }); +}); diff --git a/apps/server/src/workspace-export-safety.ts b/apps/server/src/workspace-export-safety.ts new file mode 100644 index 0000000000..844616749b --- /dev/null +++ b/apps/server/src/workspace-export-safety.ts @@ -0,0 +1,323 @@ +import type { PortableFile } from "./portable-files.js"; + +export type WorkspaceExportSensitiveMode = "auto" | "include" | "exclude"; + +export type WorkspaceExportWarning = { + id: string; + label: string; + detail: string; +}; + +const CONFIG_SECTION_METADATA: Record = { + mcp: { + warningId: "mcp-config", + label: "MCP servers", + intro: "Contains secret-like MCP config", + }, + plugin: { + warningId: "plugin-config", + label: "Plugin settings", + intro: "Contains secret-like plugin config", + }, + provider: { + warningId: "provider-config", + label: "Provider settings", + intro: "Contains secret-like provider config", + }, +}; + +const PORTABLE_FILE_PREFIXES = [".opencode/plugins/", ".opencode/tools/"] as const; + +const COMMON_SECRET_KEY_PATTERNS = [ + { id: "apiKey", test: (tokens: string[], normalized: string) => normalized.includes("apikey") || hasWordPair(tokens, "api", "key") }, + { id: "key", 
test: (tokens: string[], normalized: string) => tokens.length === 1 && normalized === "key" }, + { id: "token", test: (tokens: string[], normalized: string) => tokens.includes("token") || normalized.includes("authtoken") || normalized.includes("accesstoken") || normalized.includes("refreshtoken") }, + { id: "Bearer", test: (tokens: string[], normalized: string) => tokens.includes("bearer") || normalized.includes("authorization") }, + { id: "secret", test: (tokens: string[], normalized: string) => tokens.includes("secret") || hasWordPair(tokens, "client", "secret") }, + { id: "password", test: (tokens: string[], normalized: string) => tokens.includes("password") || normalized.includes("passwd") }, + { id: "credentials", test: (tokens: string[], normalized: string) => tokens.includes("credential") || tokens.includes("credentials") || normalized.includes("credential") }, + { id: "privateKey", test: (tokens: string[]) => hasWordPair(tokens, "private", "key") }, +] as const; + +const KNOWN_SECRET_VALUE_PATTERNS = [ + { id: "Bearer", test: (value: string) => /\bBearer\s+[A-Za-z0-9._~+\/-]+=*/.test(value) }, + { id: "token", test: (value: string) => /\b(?:ghp|gho|github_pat|xox[baprs]|sk|rk|AKIA|ASIA|AIza)[-_A-Za-z0-9]{8,}\b/.test(value) }, + { id: "JWT", test: (value: string) => /\beyJ[A-Za-z0-9_-]+\.[A-Za-z0-9._-]+\.[A-Za-z0-9._-]+\b/.test(value) }, +] as const; + +const RAW_SECRET_TEXT_PATTERNS = [ + { id: "apiKey", test: (value: string) => /\bapi[_-]?key\b/i.test(value) }, + { id: "token", test: (value: string) => /\b(?:access[_-]?token|refresh[_-]?token|auth[_-]?token|token)\b/i.test(value) }, + { id: "Bearer", test: (value: string) => /\bBearer\b/.test(value) }, + { id: "secret", test: (value: string) => /\b(?:client[_-]?secret|secret)\b/i.test(value) }, + { id: "password", test: (value: string) => /\b(?:password|passwd)\b/i.test(value) }, + { id: "credentials", test: (value: string) => /\bcredentials?\b/i.test(value) }, + { id: "privateKey", test: (value: string) 
=> /\bprivate[_-]?key\b/i.test(value) }, +] as const; + +const GENERIC_KEY_ASSIGNMENT_PATTERNS = [ + /\bkey\b\s*[:=]\s*["'`]([^"'`\n]{12,})["'`]/gi, + /["'`]key["'`]\s*:\s*["'`]([^"'`\n]{12,})["'`]/gi, +] as const; + +function cloneJson(value: T): T { + return JSON.parse(JSON.stringify(value)); +} + +function hasWordPair(tokens: string[], left: string, right: string): boolean { + return tokens.includes(left) && tokens.includes(right); +} + +function splitNameIntoTokens(value: string): string[] { + return value + .replace(/([a-z0-9])([A-Z])/g, "$1 $2") + .split(/[^A-Za-z0-9]+/) + .map((token) => token.trim().toLowerCase()) + .filter(Boolean); +} + +function detectSensitiveKeySignals(key: string, value: unknown): string[] { + const tokens = splitNameIntoTokens(key); + if (!tokens.length) return []; + const normalized = tokens.join(""); + const matches = COMMON_SECRET_KEY_PATTERNS.filter((pattern) => pattern.test(tokens, normalized)).map((pattern) => pattern.id); + + if (tokens.includes("public") && tokens.includes("key")) { + return matches.filter((match) => match !== "privateKey"); + } + + const primitive = typeof value === "string" || typeof value === "number" || typeof value === "boolean"; + const genericKeyOnly = tokens.length === 1 && tokens[0] === "key"; + if (genericKeyOnly && !primitive) return []; + if (genericKeyOnly) { + if (typeof value !== "string") return []; + if (!looksLikeGenericSecretValue(value)) return []; + } + + return matches; +} + +function looksLikeGenericSecretValue(value: string): boolean { + const trimmed = value.trim(); + if (trimmed.length < 16) return false; + if (/\s/.test(trimmed)) return false; + if (detectSensitiveStringSignals(trimmed).some((match) => match !== "key")) return true; + if (/^[a-f0-9]{32,}$/i.test(trimmed)) return true; + + const classes = [/[a-z]/.test(trimmed), /[A-Z]/.test(trimmed), /\d/.test(trimmed), /[-_=+/.]/.test(trimmed)].filter(Boolean).length; + return classes >= 3 && /^[A-Za-z0-9._~+\/-=]+$/.test(trimmed); 
+} + +function detectGenericKeyAssignments(value: string): string[] { + const matches = new Set(); + for (const pattern of GENERIC_KEY_ASSIGNMENT_PATTERNS) { + pattern.lastIndex = 0; + for (const match of value.matchAll(pattern)) { + const candidate = typeof match[1] === "string" ? match[1] : ""; + if (looksLikeGenericSecretValue(candidate)) { + matches.add("key"); + } + } + } + return Array.from(matches); +} + +function detectSensitiveStringSignals(value: string): string[] { + const trimmed = value.trim(); + if (!trimmed) return []; + + const matches = new Set(); + for (const pattern of KNOWN_SECRET_VALUE_PATTERNS) { + if (pattern.test(trimmed)) { + matches.add(pattern.id); + } + } + + if (/https?:\/\//i.test(trimmed) && trimmed.length > 32) { + matches.add("long URL"); + } + + for (const pattern of RAW_SECRET_TEXT_PATTERNS) { + if (pattern.test(trimmed)) { + matches.add(pattern.id); + } + } + + for (const match of detectGenericKeyAssignments(trimmed)) { + matches.add(match); + } + + return Array.from(matches); +} + +function collectSignals(value: unknown, keyHint?: string): string[] { + const matches = new Set(); + + if (keyHint) { + for (const match of detectSensitiveKeySignals(keyHint, value)) { + matches.add(match); + } + } + + if (typeof value === "string") { + for (const match of detectSensitiveStringSignals(value)) { + matches.add(match); + } + return Array.from(matches); + } + + if (Array.isArray(value)) { + for (const item of value) { + for (const match of collectSignals(item)) { + matches.add(match); + } + } + return Array.from(matches); + } + + if (value && typeof value === "object") { + for (const [childKey, childValue] of Object.entries(value as Record)) { + for (const match of collectSignals(childValue, childKey)) { + matches.add(match); + } + } + } + + return Array.from(matches); +} + +function describeSignals(intro: string, signals: string[]): string { + const unique = Array.from(new Set(signals)); + if (!unique.length) return intro + "."; + return 
`${intro}: ${unique.slice(0, 4).join(", ")}${unique.length > 4 ? ", ..." : ""}.`; +} + +function sanitizeValue(value: unknown, keyHint?: string): unknown { + const directSignals = new Set(); + if (keyHint) { + for (const match of detectSensitiveKeySignals(keyHint, value)) { + directSignals.add(match); + } + } + if (typeof value === "string") { + for (const match of detectSensitiveStringSignals(value)) { + directSignals.add(match); + } + return directSignals.size ? undefined : value; + } + + if (directSignals.size) return undefined; + + if (Array.isArray(value)) { + const items = value + .map((item) => sanitizeValue(item)) + .filter((item) => item !== undefined); + return items; + } + + if (value && typeof value === "object") { + const next: Record = {}; + for (const [childKey, childValue] of Object.entries(value as Record)) { + const sanitized = sanitizeValue(childValue, childKey); + if (sanitized === undefined) continue; + if (Array.isArray(sanitized) && sanitized.length === 0) continue; + if (sanitized && typeof sanitized === "object" && !Array.isArray(sanitized) && Object.keys(sanitized as Record).length === 0) { + continue; + } + next[childKey] = sanitized; + } + return next; + } + + return value; +} + +function isPortableFileCandidate(path: string): boolean { + return PORTABLE_FILE_PREFIXES.some((prefix) => path.startsWith(prefix)); +} + +export function collectWorkspaceExportWarnings(input: { + opencode: Record | null | undefined; + files: PortableFile[]; +}): WorkspaceExportWarning[] { + const warnings = new Map(); + const opencode = input.opencode ?? {}; + + for (const [sectionKey, sectionValue] of Object.entries(opencode)) { + const signals = collectSignals(sectionValue); + if (!signals.length) continue; + const metadata = + CONFIG_SECTION_METADATA[sectionKey] ?? 
{ + warningId: `config-${sectionKey}`, + label: formatSectionLabel(sectionKey), + intro: `Contains secret-like ${sectionKey} config`, + }; + warnings.set(metadata.warningId, { + id: metadata.warningId, + label: metadata.label, + detail: describeSignals(metadata.intro, signals), + }); + } + + for (const file of input.files) { + const path = String(file.path ?? "").trim(); + if (!path || !isPortableFileCandidate(path)) continue; + const signals = collectSignals(file.content); + if (!signals.length) continue; + warnings.set(`portable-file:${path}`, { + id: `portable-file:${path}`, + label: path, + detail: describeSignals("Contains secret-like file content", signals), + }); + } + + return Array.from(warnings.values()); +} + +export function stripSensitiveWorkspaceExportData(input: { + opencode: Record | null | undefined; + files: PortableFile[]; +}): { + opencode: Record; + files: PortableFile[]; +} { + const opencode = cloneJson( + input.opencode && typeof input.opencode === "object" && !Array.isArray(input.opencode) + ? input.opencode + : {}, + ) as Record; + + for (const [sectionKey, sectionValue] of Object.entries(opencode)) { + const sanitized = sanitizeValue(sectionValue); + if (sanitized === undefined) { + delete opencode[sectionKey]; + continue; + } + if (sanitized && typeof sanitized === "object" && !Array.isArray(sanitized) && Object.keys(sanitized as Record).length === 0) { + delete opencode[sectionKey]; + continue; + } + if (Array.isArray(sanitized) && sanitized.length === 0) { + delete opencode[sectionKey]; + continue; + } + opencode[sectionKey] = sanitized; + } + + const files = input.files + .filter((file) => { + const path = String(file.path ?? 
"").trim(); + if (!isPortableFileCandidate(path)) return true; + return collectSignals(file.content).length === 0; + }) + .map((file) => ({ ...file })); + + return { opencode, files }; +} + +function formatSectionLabel(sectionKey: string): string { + return sectionKey + .replace(/([a-z0-9])([A-Z])/g, "$1 $2") + .replace(/[-_]+/g, " ") + .replace(/^./, (char) => char.toUpperCase()); +} diff --git a/apps/server/src/workspace-files.ts b/apps/server/src/workspace-files.ts new file mode 100644 index 0000000000..dd8d4a7a6b --- /dev/null +++ b/apps/server/src/workspace-files.ts @@ -0,0 +1,30 @@ +import { existsSync } from "node:fs"; +import { join } from "node:path"; + +export function opencodeConfigPath(workspaceRoot: string): string { + const jsoncPath = join(workspaceRoot, "opencode.jsonc"); + const jsonPath = join(workspaceRoot, "opencode.json"); + const hiddenJsoncPath = join(workspaceRoot, ".opencode", "opencode.jsonc"); + const hiddenJsonPath = join(workspaceRoot, ".opencode", "opencode.json"); + if (existsSync(jsoncPath)) return jsoncPath; + if (existsSync(jsonPath)) return jsonPath; + if (existsSync(hiddenJsoncPath)) return hiddenJsoncPath; + if (existsSync(hiddenJsonPath)) return hiddenJsonPath; + return jsoncPath; +} + +export function openworkConfigPath(workspaceRoot: string): string { + return join(workspaceRoot, ".opencode", "openwork.json"); +} + +export function projectSkillsDir(workspaceRoot: string): string { + return join(workspaceRoot, ".opencode", "skills"); +} + +export function projectCommandsDir(workspaceRoot: string): string { + return join(workspaceRoot, ".opencode", "commands"); +} + +export function projectPluginsDir(workspaceRoot: string): string { + return join(workspaceRoot, ".opencode", "plugins"); +} diff --git a/apps/server/src/workspace-import-preview.test.ts b/apps/server/src/workspace-import-preview.test.ts new file mode 100644 index 0000000000..c2a4ce6646 --- /dev/null +++ b/apps/server/src/workspace-import-preview.test.ts @@ -0,0 
+1,805 @@
+import { afterEach, describe, expect, test } from "bun:test";
+import { mkdir, mkdtemp, readFile, rm, stat, writeFile } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+
+import { auditLogPath } from "./audit.js";
+import { buildCommandContent } from "./commands.js";
+import { startServer } from "./server.js";
+import { buildSkillContent } from "./skills.js";
+import type { ServerConfig } from "./types.js";
+import {
+  buildWorkspaceImportPreview,
+  publicWorkspaceImportPreview,
+  summarizeWorkspaceImportApplied,
+  summarizeWorkspaceImportPreview,
+  workspaceImportPreviewApprovalPaths,
+} from "./workspace-import-preview.js";
+
+const tempDirs: string[] = [];
+
+afterEach(async () => {
+  while (tempDirs.length > 0) {
+    const dir = tempDirs.pop();
+    if (!dir) continue;
+    await rm(dir, { recursive: true, force: true });
+  }
+});
+
+async function makeWorkspace(): Promise<string> { // temp workspace with .opencode, auto-removed by afterEach
+  const dir = await mkdtemp(join(tmpdir(), "openwork-import-preview-"));
+  tempDirs.push(dir);
+  await mkdir(join(dir, ".opencode"), { recursive: true });
+  return dir;
+}
+
+async function pathExists(path: string): Promise<boolean> {
+  try {
+    await stat(path);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+function makeServerConfig(workspace: string, dataDir: string): ServerConfig {
+  return {
+    host: "127.0.0.1",
+    port: 0,
+    token: "test-token",
+    hostToken: "host-token",
+    configPath: join(dataDir, "config.json"),
+    approval: { mode: "auto", timeoutMs: 1000 },
+    corsOrigins: [],
+    workspaces: [
+      {
+        id: "workspace",
+        name: "workspace",
+        path: workspace,
+        preset: "default",
+        workspaceType: "local",
+      },
+    ],
+    authorizedRoots: [workspace],
+    readOnly: false,
+    startedAt: Date.now(),
+    tokenSource: "generated",
+    hostTokenSource: "generated",
+    logFormat: "pretty",
+    logRequests: false,
+  };
+}
+
+type TestHeaders = {
+  Authorization: string;
+  "Content-Type": string;
+};
+
+async function requestWorkspaceImportPreview(
+  baseUrl: string,
+  headers: TestHeaders,
+  payload: Record<string, unknown>,
+): Promise<{ fingerprint: string; summary: { create: number; update: number; delete: number; unchanged: number } }> {
+  const response = await fetch(`${baseUrl}/workspace/workspace/import/preview`, {
+    method: "POST",
+    headers,
+    body: JSON.stringify(payload),
+  });
+  expect(response.status).toBe(200);
+  return await response.json() as {
+    fingerprint: string;
+    summary: { create: number; update: number; delete: number; unchanged: number };
+  };
+}
+
+async function requestWorkspaceImportWithPreview(
+  baseUrl: string,
+  headers: TestHeaders,
+  payload: Record<string, unknown>,
+): Promise<Response> {
+  const preview = await requestWorkspaceImportPreview(baseUrl, headers, payload);
+  return await fetch(`${baseUrl}/workspace/workspace/import`, {
+    method: "POST",
+    headers,
+    body: JSON.stringify({ ...payload, previewFingerprint: preview.fingerprint }),
+  });
+}
+
+async function silenceExpectedServerError<T>(run: () => Promise<T>): Promise<T> { // mute the known unhandled-error log while run() executes
+  const originalError = console.error;
+  console.error = (...args: unknown[]) => {
+    if (args[0] === "[openwork-server] Unhandled error:") return;
+    originalError(...args);
+  };
+  try {
+    return await run();
+  } finally {
+    console.error = originalError;
+  }
+}
+
+async function waitForPendingApproval(baseUrl: string): Promise<string> { // polls /approvals (~1s max) for the first pending approval id
+  for (let attempt = 0; attempt < 50; attempt += 1) {
+    const response = await fetch(`${baseUrl}/approvals`, {
+      headers: { "X-OpenWork-Host-Token": "host-token" },
+    });
+    expect(response.status).toBe(200);
+    const body = await response.json() as { items: Array<{ id: string }> };
+    const approval = body.items[0];
+    if (approval) return approval.id;
+    await new Promise((resolve) => setTimeout(resolve, 20));
+  }
+  throw new Error("Timed out waiting for approval request");
+}
+
+describe("workspace import preview", () => {
+  test("summarizes workspace import changes without writing files", async () => {
+    const workspace = await makeWorkspace();
+    await writeFile(join(workspace,
"opencode.jsonc"), '{ "plugin": ["old-plugin"] }\n', "utf8"); + await mkdir(join(workspace, ".opencode", "skills", "demo"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "skills", "demo", "SKILL.md"), "old skill\n", "utf8"); + await mkdir(join(workspace, ".opencode", "commands"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "commands", "old.md"), "old command\n", "utf8"); + await mkdir(join(workspace, ".opencode", "tools"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "plugins"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "tools", "existing.ts"), "old tool\n", "utf8"); + await writeFile(join(workspace, ".opencode", "plugins", "removed.ts"), "removed plugin\n", "utf8"); + + const preview = await buildWorkspaceImportPreview(workspace, { + mode: { skills: "replace", commands: "replace", files: "replace" }, + opencode: { + plugin: ["old-plugin", "new-plugin"], + }, + openwork: { + blueprint: { + materialized: { + sessions: { items: [{ templateId: "old", sessionId: "ses_123" }] }, + }, + }, + }, + skills: [ + { name: "demo", description: "Demo skill", content: "---\nname: demo\ndescription: Demo skill\n---\nupdated\n" }, + { name: "new-skill", description: "New skill", content: "---\nname: new-skill\ndescription: New skill\n---\nbody\n" }, + ], + commands: [ + { content: "---\nname: old\ndescription: Old command\n---\nupdated command\n" }, + { name: "new-command", template: "run new command" }, + ], + files: [ + { path: ".opencode/tools/existing.ts", content: "new tool\n" }, + { path: ".opencode/agents/new.md", content: "new agent\n" }, + ], + }); + + expect(preview.summary).toEqual({ + total: 9, + create: 4, + update: 4, + replace: 0, + delete: 1, + unchanged: 0, + }); + expect(preview.changes.map((change) => [change.kind, change.action, change.path])).toEqual([ + ["opencode", "update", "opencode.jsonc"], + ["openwork", "create", ".opencode/openwork.json"], + ["skill", 
"update", ".opencode/skills/demo/SKILL.md"], + ["skill", "create", ".opencode/skills/new-skill/SKILL.md"], + ["command", "update", ".opencode/commands/old.md"], + ["command", "create", ".opencode/commands/new-command.md"], + ["file", "update", ".opencode/tools/existing.ts"], + ["file", "create", ".opencode/agents/new.md"], + ["file", "delete", ".opencode/plugins/removed.ts"], + ]); + + expect(await readFile(join(workspace, ".opencode", "tools", "existing.ts"), "utf8")).toBe("old tool\n"); + }); + + test("marks identical config as unchanged and excludes it from approval paths", async () => { + const workspace = await makeWorkspace(); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["demo"] }\n', "utf8"); + + const preview = await buildWorkspaceImportPreview(workspace, { + opencode: { plugin: ["demo"] }, + }); + + expect(preview.summary).toEqual({ + total: 1, + create: 0, + update: 0, + replace: 0, + delete: 0, + unchanged: 1, + }); + expect(preview.changes[0]?.action).toBe("unchanged"); + expect(workspaceImportPreviewApprovalPaths(preview)).toEqual([]); + expect(summarizeWorkspaceImportPreview(preview)).toBe("Import workspace config (no changes)"); + }); + + test("marks identical skills and commands unchanged", async () => { + const workspace = await makeWorkspace(); + const skill = { + name: "demo", + description: "Demo skill", + content: "---\nname: demo\ndescription: Demo skill\n---\nbody\n", + }; + const command = { + name: "demo-command", + description: "Demo command", + template: "do the thing", + }; + const skillContent = buildSkillContent(skill); + const commandContent = buildCommandContent(command); + await mkdir(join(workspace, ".opencode", "skills", "demo"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "commands"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "skills", "demo", "SKILL.md"), skillContent.content, "utf8"); + await writeFile(join(workspace, ".opencode", "commands", 
"demo-command.md"), commandContent.content, "utf8"); + + const preview = await buildWorkspaceImportPreview(workspace, { + skills: [skill], + commands: [command], + }); + + expect(preview.changes.map((change) => [change.kind, change.action, change.path])).toEqual([ + ["skill", "unchanged", ".opencode/skills/demo/SKILL.md"], + ["command", "unchanged", ".opencode/commands/demo-command.md"], + ]); + expect(workspaceImportPreviewApprovalPaths(preview)).toEqual([]); + expect(publicWorkspaceImportPreview(preview).changes[0]).not.toHaveProperty("absolutePath"); + expect(publicWorkspaceImportPreview(preview).changes[0]).not.toHaveProperty("beforeDigest"); + }); + + test("uses replace action for config replacement", async () => { + const workspace = await makeWorkspace(); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["old"] }\n', "utf8"); + + const preview = await buildWorkspaceImportPreview(workspace, { + mode: { opencode: "replace" }, + opencode: { plugin: ["new"] }, + }); + + expect(preview.changes[0]).toMatchObject({ + kind: "opencode", + action: "replace", + path: "opencode.jsonc", + }); + expect(summarizeWorkspaceImportPreview(preview)).toBe("Import workspace config (update 1)"); + expect(summarizeWorkspaceImportApplied(preview)).toBe("Imported workspace config (update 1)"); + }); + + test("rejects unsafe portable file paths before import", async () => { + const workspace = await makeWorkspace(); + + await expect( + buildWorkspaceImportPreview(workspace, { + files: [{ path: ".opencode/.env", content: "SECRET=value\n" }], + }), + ).rejects.toThrow(/Portable file path is not allowed/i); + }); + + test("replace preview treats empty sections as delete all", async () => { + const workspace = await makeWorkspace(); + await mkdir(join(workspace, ".opencode", "skills", "old-skill"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "commands"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "agents"), { recursive: true }); 
+ await writeFile(join(workspace, ".opencode", "skills", "old-skill", "SKILL.md"), "old skill\n", "utf8"); + await writeFile(join(workspace, ".opencode", "commands", "old-command.md"), "old command\n", "utf8"); + await writeFile(join(workspace, ".opencode", "agents", "old.md"), "old agent\n", "utf8"); + + const preview = await buildWorkspaceImportPreview(workspace, { + mode: { skills: "replace", commands: "replace", files: "replace" }, + skills: [], + commands: [], + files: [], + }); + + expect(preview.summary).toMatchObject({ + total: 3, + delete: 3, + }); + expect(preview.changes.map((change) => [change.kind, change.action, change.path])).toEqual([ + ["skill", "delete", ".opencode/skills/old-skill"], + ["command", "delete", ".opencode/commands/old-command.md"], + ["file", "delete", ".opencode/agents/old.md"], + ]); + }); + + test("preview route returns public changes and no-op import does not audit", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["demo"] }\n', "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const body = JSON.stringify({ opencode: { plugin: ["demo"] } }); + + const previewResponse = await fetch(`${baseUrl}/workspace/workspace/import/preview`, { + method: "POST", + headers, + body, + }); + expect(previewResponse.status).toBe(200); + const preview = await previewResponse.json() as Record; + expect(typeof preview.fingerprint).toBe("string"); + expect((preview.changes as Array>)[0]).not.toHaveProperty("absolutePath"); + 
expect((preview.changes as Array>)[0]).not.toHaveProperty("beforeDigest"); + + const importResponse = await fetch(`${baseUrl}/workspace/workspace/import`, { + method: "POST", + headers, + body, + }); + expect(importResponse.status).toBe(200); + const imported = await importResponse.json() as Record; + expect(imported.preview).toEqual(preview); + expect(await pathExists(auditLogPath("workspace"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("no-op import validates preview fingerprint shape", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["demo"] }\n', "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const response = await fetch(`http://127.0.0.1:${server.port}/workspace/workspace/import`, { + method: "POST", + headers: { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }, + body: JSON.stringify({ + opencode: { plugin: ["demo"] }, + previewFingerprint: 123, + }), + }); + + expect(response.status).toBe(400); + const body = await response.json() as { code: string }; + expect(body.code).toBe("invalid_workspace_import_preview_fingerprint"); + expect(await pathExists(auditLogPath("workspace"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("changed import requires a reviewed preview fingerprint", async () => { + const workspace = await 
makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const response = await fetch(`http://127.0.0.1:${server.port}/workspace/workspace/import`, { + method: "POST", + headers: { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }, + body: JSON.stringify({ + opencode: { plugin: ["demo"] }, + }), + }); + + expect(response.status).toBe(409); + const body = await response.json() as { + code: string; + preview: { fingerprint: string; summary: { create: number } }; + }; + expect(body.code).toBe("workspace_import_preview_required"); + expect(typeof body.preview.fingerprint).toBe("string"); + expect(body.preview.summary.create).toBe(1); + expect(await pathExists(join(workspace, "opencode.jsonc"))).toBe(false); + expect(await pathExists(auditLogPath("workspace"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("import route writes changed items and records audit", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const response = await requestWorkspaceImportWithPreview(baseUrl, 
headers, { + opencode: { plugin: ["demo"] }, + skills: [ + { + name: "demo", + description: "Demo skill", + content: "Use this skill for demo work.", + }, + ], + files: [{ path: ".opencode/agents/demo.md", content: "Demo agent\n" }], + }); + + expect(response.status).toBe(200); + const body = await response.json() as { preview: { summary: { create: number } } }; + expect(body.preview.summary.create).toBe(3); + expect(await readFile(join(workspace, "opencode.jsonc"), "utf8")).toContain('"plugin"'); + expect(await readFile(join(workspace, ".opencode", "skills", "demo", "SKILL.md"), "utf8")).toContain("Demo skill"); + expect(await readFile(join(workspace, ".opencode", "agents", "demo.md"), "utf8")).toBe("Demo agent\n"); + expect(await readFile(auditLogPath("workspace"), "utf8")).toContain("Imported workspace config"); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("replace import route removes extra skills, commands, and portable files", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + const keepSkill = { + name: "keep", + description: "Keep skill", + content: "Use this skill for stable setup.", + }; + const keepCommand = { + name: "keep-command", + description: "Keep command", + template: "run stable setup", + }; + const keepSkillContent = buildSkillContent(keepSkill).content; + const keepCommandContent = buildCommandContent(keepCommand).content; + + await mkdir(join(workspace, ".opencode", "skills", "keep"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "skills", "remove-me"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "commands"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "tools"), { recursive: true }); + await 
writeFile(join(workspace, ".opencode", "skills", "keep", "SKILL.md"), keepSkillContent, "utf8"); + await writeFile(join(workspace, ".opencode", "skills", "remove-me", "SKILL.md"), "legacy skill\n", "utf8"); + await writeFile(join(workspace, ".opencode", "commands", "keep-command.md"), keepCommandContent, "utf8"); + await writeFile(join(workspace, ".opencode", "commands", "remove-me.md"), "legacy command\n", "utf8"); + await writeFile(join(workspace, ".opencode", "tools", "shared.ts"), "shared tool\n", "utf8"); + await writeFile(join(workspace, ".opencode", "tools", "remove-me.ts"), "legacy tool\n", "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const response = await requestWorkspaceImportWithPreview(baseUrl, headers, { + mode: { skills: "replace", commands: "replace", files: "replace" }, + skills: [keepSkill], + commands: [keepCommand], + files: [{ path: ".opencode/tools/shared.ts", content: "shared tool\n" }], + }); + + expect(response.status).toBe(200); + const body = await response.json() as { + preview: { summary: { delete: number; unchanged: number } }; + }; + expect(body.preview.summary.delete).toBe(3); + expect(body.preview.summary.unchanged).toBe(3); + expect(await pathExists(join(workspace, ".opencode", "skills", "remove-me"))).toBe(false); + expect(await pathExists(join(workspace, ".opencode", "commands", "remove-me.md"))).toBe(false); + expect(await pathExists(join(workspace, ".opencode", "tools", "remove-me.ts"))).toBe(false); + expect(await readFile(join(workspace, ".opencode", "skills", "keep", "SKILL.md"), "utf8")).toBe(keepSkillContent); + expect(await readFile(join(workspace, ".opencode", 
"commands", "keep-command.md"), "utf8")).toBe(keepCommandContent); + expect(await readFile(join(workspace, ".opencode", "tools", "shared.ts"), "utf8")).toBe("shared tool\n"); + expect(await readFile(auditLogPath("workspace"), "utf8")).toContain("Imported workspace config (remove 3)"); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("replace import route honors empty sections", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + await mkdir(join(workspace, ".opencode", "skills", "old-skill"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "commands"), { recursive: true }); + await mkdir(join(workspace, ".opencode", "agents"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "skills", "old-skill", "SKILL.md"), "old skill\n", "utf8"); + await writeFile(join(workspace, ".opencode", "commands", "old-command.md"), "old command\n", "utf8"); + await writeFile(join(workspace, ".opencode", "agents", "old.md"), "old agent\n", "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const response = await requestWorkspaceImportWithPreview(baseUrl, headers, { + mode: { skills: "replace", commands: "replace", files: "replace" }, + skills: [], + commands: [], + files: [], + }); + + expect(response.status).toBe(200); + const body = await response.json() as { + preview: { summary: { delete: number } }; + }; + 
expect(body.preview.summary.delete).toBe(3); + expect(await pathExists(join(workspace, ".opencode", "skills", "old-skill"))).toBe(false); + expect(await pathExists(join(workspace, ".opencode", "commands", "old-command.md"))).toBe(false); + expect(await pathExists(join(workspace, ".opencode", "agents", "old.md"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("import route rejects a stale reviewed preview", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["old"] }\n', "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const payload = { opencode: { plugin: ["new"] } }; + + const previewResponse = await fetch(`${baseUrl}/workspace/workspace/import/preview`, { + method: "POST", + headers, + body: JSON.stringify(payload), + }); + expect(previewResponse.status).toBe(200); + const preview = await previewResponse.json() as { fingerprint: string }; + + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["changed-after-preview"] }\n', "utf8"); + + const importResponse = await fetch(`${baseUrl}/workspace/workspace/import`, { + method: "POST", + headers, + body: JSON.stringify({ ...payload, previewFingerprint: preview.fingerprint }), + }); + expect(importResponse.status).toBe(409); + const rejected = await importResponse.json() as { + code: string; + preview: { fingerprint: 
string }; + }; + expect(rejected.code).toBe("workspace_import_preview_stale"); + expect(rejected.preview.fingerprint).not.toBe(preview.fingerprint); + expect(await readFile(join(workspace, "opencode.jsonc"), "utf8")).toContain("changed-after-preview"); + expect(await pathExists(auditLogPath("workspace"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("import route revalidates the preview after approval", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["old"] }\n', "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const serverConfig = makeServerConfig(workspace, dataDir); + serverConfig.approval = { mode: "manual", timeoutMs: 5000 }; + const server = startServer(serverConfig) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const payload = { opencode: { plugin: ["new"] } }; + const preview = await requestWorkspaceImportPreview(baseUrl, headers, payload); + + const importPromise = fetch(`${baseUrl}/workspace/workspace/import`, { + method: "POST", + headers, + body: JSON.stringify({ ...payload, previewFingerprint: preview.fingerprint }), + }); + + const approvalId = await waitForPendingApproval(baseUrl); + await writeFile(join(workspace, "opencode.jsonc"), '{ "plugin": ["changed-during-approval"] }\n', "utf8"); + const approvalResponse = await fetch(`${baseUrl}/approvals/${approvalId}`, { + method: "POST", + headers: { + "X-OpenWork-Host-Token": "host-token", + "Content-Type": 
"application/json", + }, + body: JSON.stringify({ reply: "allow" }), + }); + expect(approvalResponse.status).toBe(200); + + const importResponse = await importPromise; + expect(importResponse.status).toBe(409); + const rejected = await importResponse.json() as { + code: string; + preview: { fingerprint: string }; + }; + expect(rejected.code).toBe("workspace_import_preview_stale"); + expect(rejected.preview.fingerprint).not.toBe(preview.fingerprint); + expect(await readFile(join(workspace, "opencode.jsonc"), "utf8")).toContain("changed-during-approval"); + expect(await pathExists(auditLogPath("workspace"))).toBe(false); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("import route validates preview fingerprint shape", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const response = await fetch(`http://127.0.0.1:${server.port}/workspace/workspace/import`, { + method: "POST", + headers: { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }, + body: JSON.stringify({ + opencode: { plugin: ["demo"] }, + previewFingerprint: 123, + }), + }); + + expect(response.status).toBe(400); + const body = await response.json() as { code: string }; + expect(body.code).toBe("invalid_workspace_import_preview_fingerprint"); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); + + test("replace import keeps existing items when 
an incoming write fails", async () => { + const workspace = await makeWorkspace(); + const dataDir = await mkdtemp(join(tmpdir(), "openwork-import-preview-data-")); + tempDirs.push(dataDir); + + await mkdir(join(workspace, ".opencode", "skills", "old"), { recursive: true }); + await writeFile(join(workspace, ".opencode", "skills", "old", "SKILL.md"), "old skill\n", "utf8"); + await writeFile(join(workspace, ".opencode", "skills", "new"), "blocks new skill directory\n", "utf8"); + + const originalDataDir = process.env.OPENWORK_DATA_DIR; + process.env.OPENWORK_DATA_DIR = dataDir; + const server = startServer(makeServerConfig(workspace, dataDir)) as { + port: number; + stop: (force?: boolean) => void; + }; + try { + const baseUrl = `http://127.0.0.1:${server.port}`; + const headers = { + Authorization: "Bearer test-token", + "Content-Type": "application/json", + }; + const response = await silenceExpectedServerError(() => + requestWorkspaceImportWithPreview(baseUrl, headers, { + mode: { skills: "replace" }, + skills: [ + { + name: "new", + description: "New skill", + content: "new skill\n", + }, + ], + }), + ); + + expect(response.ok).toBe(false); + expect(response.status).toBe(500); + expect(await readFile(join(workspace, ".opencode", "skills", "old", "SKILL.md"), "utf8")).toBe("old skill\n"); + expect(await readFile(join(workspace, ".opencode", "skills", "new"), "utf8")).toBe( + "blocks new skill directory\n", + ); + } finally { + server.stop(true); + if (originalDataDir === undefined) { + delete process.env.OPENWORK_DATA_DIR; + } else { + process.env.OPENWORK_DATA_DIR = originalDataDir; + } + } + }); +}); diff --git a/apps/server/src/workspace-import-preview.ts b/apps/server/src/workspace-import-preview.ts new file mode 100644 index 0000000000..7c5a8af93a --- /dev/null +++ b/apps/server/src/workspace-import-preview.ts @@ -0,0 +1,511 @@ +import { createHash } from "node:crypto"; +import { readdir, readFile } from "node:fs/promises"; +import { join, relative } from 
"node:path"; + +import { sanitizeOpenworkTemplateConfig } from "./blueprint-sessions.js"; +import { buildCommandContent } from "./commands.js"; +import { ApiError } from "./errors.js"; +import { parseFrontmatter } from "./frontmatter.js"; +import { readJsoncFile } from "./jsonc.js"; +import { planPortableFiles, listPortableFilePaths, type PortableFile } from "./portable-files.js"; +import { sanitizePortableOpencodeConfig } from "./portable-opencode.js"; +import { buildSkillContent } from "./skills.js"; +import { exists } from "./utils.js"; +import { sanitizeCommandName, validateCommandName, validateSkillName } from "./validators.js"; +import { + opencodeConfigPath, + openworkConfigPath, + projectCommandsDir, + projectSkillsDir, +} from "./workspace-files.js"; + +export type WorkspaceImportMode = "merge" | "replace"; +export type WorkspaceImportChangeKind = "opencode" | "openwork" | "skill" | "command" | "file"; +export type WorkspaceImportChangeAction = "create" | "update" | "replace" | "delete" | "unchanged"; + +export type WorkspaceImportChange = { + kind: WorkspaceImportChangeKind; + action: WorkspaceImportChangeAction; + label: string; + path: string; +}; + +type WorkspaceImportPlannedChange = WorkspaceImportChange & { + absolutePath: string; + beforeDigest: string; + afterDigest: string; +}; + +export type WorkspaceImportPreview = { + fingerprint: string; + summary: { + total: number; + create: number; + update: number; + replace: number; + delete: number; + unchanged: number; + }; + changes: WorkspaceImportChange[]; +}; + +export type WorkspaceImportPlan = Omit & { + changes: WorkspaceImportPlannedChange[]; +}; + +type WorkspaceImportSection = "opencode" | "openwork" | "skills" | "commands" | "files"; + +export type NormalizedWorkspaceImport = { + modes: Record; + sections: Record; + opencode?: Record; + openwork?: Record; + skills: Array<{ name: string; content: string; description?: string }>; + commands: Array<{ + name: string; + template: string; + 
description?: string; + agent?: string; + model?: string | null; + subtask?: boolean; + }>; + files: PortableFile[]; +}; + +function readRecord(value: unknown): Record | null { + return value && typeof value === "object" && !Array.isArray(value) + ? (value as Record) + : null; +} + +function readMode(value: unknown): WorkspaceImportMode { + return value === "replace" ? "replace" : "merge"; +} + +function normalizeModes(value: unknown): Record { + const record = readRecord(value) ?? {}; + return { + opencode: readMode(record.opencode), + openwork: readMode(record.openwork), + skills: readMode(record.skills), + commands: readMode(record.commands), + files: readMode(record.files), + }; +} + +function readArray(value: unknown, label: string): Record[] { + if (value === undefined || value === null) return []; + if (!Array.isArray(value)) { + throw new ApiError(400, "invalid_workspace_import_payload", `${label} must be an array`); + } + return value.map((item, index) => { + const record = readRecord(item); + if (!record) { + throw new ApiError(400, "invalid_workspace_import_payload", `${label}[${index}] must be an object`); + } + return record; + }); +} + +function normalizeSkills(value: unknown): NormalizedWorkspaceImport["skills"] { + return readArray(value, "skills").map((skill) => { + const name = String(skill.name ?? "").trim(); + const content = typeof skill.content === "string" ? skill.content : ""; + validateSkillName(name); + if (!content) { + throw new ApiError(400, "invalid_skill_content", "Skill content is required"); + } + return { + name, + content, + description: typeof skill.description === "string" ? 
skill.description : undefined, + }; + }); +} + +function normalizeCommands(value: unknown): NormalizedWorkspaceImport["commands"] { + return readArray(value, "commands").map((command) => { + if (typeof command.content === "string" && command.content.trim()) { + const parsed = parseFrontmatter(command.content); + const name = sanitizeCommandName( + String(command.name || (typeof parsed.data.name === "string" ? parsed.data.name : "")), + ); + validateCommandName(name); + const template = parsed.body.trim(); + if (!template) { + throw new ApiError(400, "invalid_command_template", "Command template is required"); + } + return { + name, + template, + description: + typeof command.description === "string" + ? command.description + : typeof parsed.data.description === "string" + ? parsed.data.description + : undefined, + agent: typeof parsed.data.agent === "string" ? parsed.data.agent : undefined, + model: typeof parsed.data.model === "string" ? parsed.data.model : undefined, + subtask: typeof parsed.data.subtask === "boolean" ? parsed.data.subtask : undefined, + }; + } + + const name = sanitizeCommandName(String(command.name ?? "")); + validateCommandName(name); + const template = typeof command.template === "string" ? command.template : ""; + if (!template.trim()) { + throw new ApiError(400, "invalid_command_template", "Command template is required"); + } + return { + name, + template, + description: typeof command.description === "string" ? command.description : undefined, + agent: typeof command.agent === "string" ? command.agent : undefined, + model: typeof command.model === "string" ? command.model : null, + subtask: typeof command.subtask === "boolean" ? 
command.subtask : undefined, + }; + }); +} + +export function normalizeWorkspaceImportPayload( + workspaceRoot: string, + payload: Record, +): NormalizedWorkspaceImport { + return { + modes: normalizeModes(payload.mode), + sections: { + opencode: payload.opencode !== undefined, + openwork: payload.openwork !== undefined, + skills: payload.skills !== undefined, + commands: payload.commands !== undefined, + files: payload.files !== undefined, + }, + ...(payload.opencode !== undefined + ? { opencode: sanitizePortableOpencodeConfig(readRecord(payload.opencode)) } + : {}), + ...(payload.openwork !== undefined + ? { openwork: sanitizeOpenworkTemplateConfig(readRecord(payload.openwork)) } + : {}), + skills: normalizeSkills(payload.skills), + commands: normalizeCommands(payload.commands), + files: planPortableFiles(workspaceRoot, payload.files).map((file) => ({ + path: file.path, + content: file.content, + })), + }; +} + +function stableStringify(value: unknown): string { + if (Array.isArray(value)) { + return `[${value.map(stableStringify).join(",")}]`; + } + if (value && typeof value === "object") { + const entries = Object.entries(value as Record) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([key, child]) => `${JSON.stringify(key)}:${stableStringify(child)}`); + return `{${entries.join(",")}}`; + } + return JSON.stringify(value) ?? "undefined"; +} + +function digest(value: unknown): string { + return createHash("sha256").update(stableStringify(value)).digest("hex"); +} + +function textDigest(content: string | null): string { + return digest(content === null ? 
{ type: "missing" } : { type: "text", content }); +} + +function jsonDigest(value: unknown): string { + return digest({ type: "json", value }); +} + +function sameJson(left: unknown, right: unknown): boolean { + return stableStringify(left) === stableStringify(right); +} + +function actionForTarget( + existsBefore: boolean, + changed: boolean, + mode: WorkspaceImportMode, +): WorkspaceImportChangeAction { + if (!existsBefore) return "create"; + if (!changed) return "unchanged"; + return mode === "replace" ? "replace" : "update"; +} + +function rel(workspaceRoot: string, absolutePath: string): string { + return relative(workspaceRoot, absolutePath).replaceAll("\\", "/"); +} + +function isMissingFileError(error: unknown): boolean { + return Boolean(error && typeof error === "object" && "code" in error && error.code === "ENOENT"); +} + +async function readTextIfPresent(path: string): Promise { + try { + return await readFile(path, "utf8"); + } catch (error) { + if (isMissingFileError(error)) return null; + throw error; + } +} + +function countSummary(changes: WorkspaceImportChange[]): WorkspaceImportPreview["summary"] { + return changes.reduce( + (summary, change) => { + summary.total += 1; + summary[change.action] += 1; + return summary; + }, + { total: 0, create: 0, update: 0, replace: 0, delete: 0, unchanged: 0 }, + ); +} + +function fingerprintWorkspaceImportChanges(changes: WorkspaceImportPlannedChange[]): string { + return digest( + changes.map((change) => ({ + kind: change.kind, + action: change.action, + path: change.path, + beforeDigest: change.beforeDigest, + afterDigest: change.afterDigest, + })), + ); +} + +async function readOpenworkConfig(path: string): Promise> { + const raw = await readTextIfPresent(path); + if (raw === null) return {}; + try { + return JSON.parse(raw) as Record; + } catch { + throw new ApiError(422, "invalid_json", "Failed to parse openwork.json"); + } +} + +async function listProjectSkillNames(workspaceRoot: string): Promise { + 
const dir = projectSkillsDir(workspaceRoot); + if (!(await exists(dir))) return []; + const entries = await readdir(dir, { withFileTypes: true }); + const names: string[] = []; + for (const entry of entries) { + if (!entry.isDirectory()) continue; + if (await exists(join(dir, entry.name, "SKILL.md"))) { + names.push(entry.name); + } + } + return names.sort(); +} + +async function listProjectCommandNames(workspaceRoot: string): Promise { + const dir = projectCommandsDir(workspaceRoot); + if (!(await exists(dir))) return []; + const entries = await readdir(dir, { withFileTypes: true }); + return entries + .filter((entry) => entry.isFile() && entry.name.endsWith(".md")) + .map((entry) => entry.name.replace(/\.md$/, "")) + .sort(); +} + +export async function buildWorkspaceImportPreview( + workspaceRoot: string, + payload: Record, +): Promise { + const input = normalizeWorkspaceImportPayload(workspaceRoot, payload); + const changes: WorkspaceImportPlannedChange[] = []; + + if (input.opencode !== undefined) { + const path = opencodeConfigPath(workspaceRoot); + const before = await readJsoncFile(path, {} as Record); + const after = input.modes.opencode === "replace" ? input.opencode : { ...before.data, ...input.opencode }; + changes.push({ + kind: "opencode", + action: actionForTarget(Boolean(before.raw), !sameJson(before.data, after), input.modes.opencode), + label: "OpenCode config", + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: jsonDigest(before.data), + afterDigest: jsonDigest(after), + }); + } + + if (input.openwork !== undefined) { + const path = openworkConfigPath(workspaceRoot); + const existsBefore = await exists(path); + const before = await readOpenworkConfig(path); + const after = input.modes.openwork === "replace" ? 
input.openwork : { ...before, ...input.openwork }; + changes.push({ + kind: "openwork", + action: actionForTarget(existsBefore, !sameJson(before, after), input.modes.openwork), + label: "OpenWork config", + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: existsBefore ? jsonDigest(before) : textDigest(null), + afterDigest: jsonDigest(after), + }); + } + + if (input.sections.skills) { + const existing = new Set(await listProjectSkillNames(workspaceRoot)); + const incoming = new Set(); + for (const skill of input.skills) { + incoming.add(skill.name); + const path = join(projectSkillsDir(workspaceRoot), skill.name, "SKILL.md"); + const existsBefore = existing.has(skill.name); + const before = existsBefore ? await readTextIfPresent(path) : null; + const next = buildSkillContent(skill); + changes.push({ + kind: "skill", + action: actionForTarget(before !== null, before !== next.content, "merge"), + label: skill.name, + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: before !== null ? 
textDigest(before) : textDigest(null), + afterDigest: textDigest(next.content), + }); + } + if (input.modes.skills === "replace") { + for (const name of existing) { + if (incoming.has(name)) continue; + const path = join(projectSkillsDir(workspaceRoot), name); + const skillFile = join(path, "SKILL.md"); + const before = await readTextIfPresent(skillFile); + if (before === null) continue; + changes.push({ + kind: "skill", + action: "delete", + label: name, + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: textDigest(before), + afterDigest: textDigest(null), + }); + } + } + } + + if (input.sections.commands) { + const existing = new Set(await listProjectCommandNames(workspaceRoot)); + const incoming = new Set(); + for (const command of input.commands) { + incoming.add(command.name); + const path = join(projectCommandsDir(workspaceRoot), `${command.name}.md`); + const existsBefore = existing.has(command.name); + const before = existsBefore ? await readTextIfPresent(path) : null; + const next = buildCommandContent(command); + changes.push({ + kind: "command", + action: actionForTarget(before !== null, before !== next.content, "merge"), + label: command.name, + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: before !== null ? 
textDigest(before) : textDigest(null), + afterDigest: textDigest(next.content), + }); + } + if (input.modes.commands === "replace") { + for (const name of existing) { + if (incoming.has(name)) continue; + const path = join(projectCommandsDir(workspaceRoot), `${name}.md`); + const before = await readTextIfPresent(path); + if (before === null) continue; + changes.push({ + kind: "command", + action: "delete", + label: name, + path: rel(workspaceRoot, path), + absolutePath: path, + beforeDigest: textDigest(before), + afterDigest: textDigest(null), + }); + } + } + } + + if (input.sections.files) { + const incoming = new Set(); + for (const file of input.files) { + incoming.add(file.path); + const path = join(workspaceRoot, file.path); + const existsBefore = await exists(path); + const before = existsBefore ? await readTextIfPresent(path) : null; + changes.push({ + kind: "file", + action: actionForTarget(before !== null, before !== file.content, "merge"), + label: file.path, + path: file.path, + absolutePath: path, + beforeDigest: before !== null ? 
textDigest(before) : textDigest(null), + afterDigest: textDigest(file.content), + }); + } + if (input.modes.files === "replace") { + for (const filePath of await listPortableFilePaths(workspaceRoot)) { + if (incoming.has(filePath)) continue; + const path = join(workspaceRoot, filePath); + const before = await readTextIfPresent(path); + if (before === null) continue; + changes.push({ + kind: "file", + action: "delete", + label: filePath, + path: filePath, + absolutePath: path, + beforeDigest: textDigest(before), + afterDigest: textDigest(null), + }); + } + } + } + + const summary = countSummary(changes); + return { + fingerprint: fingerprintWorkspaceImportChanges(changes), + summary, + changes, + }; +} + +export function publicWorkspaceImportPreview(preview: WorkspaceImportPlan): WorkspaceImportPreview { + return { + fingerprint: preview.fingerprint, + summary: preview.summary, + changes: preview.changes.map( + ({ absolutePath: _absolutePath, beforeDigest: _beforeDigest, afterDigest: _afterDigest, ...change }) => change, + ), + }; +} + +export function workspaceImportPreviewApprovalPaths(preview: WorkspaceImportPlan): string[] { + return Array.from( + new Set( + preview.changes + .filter((change) => change.action !== "unchanged") + .map((change) => change.absolutePath), + ), + ); +} + +function countLabel(verb: string, count: number): string | null { + if (count <= 0) return null; + return `${verb} ${count}`; +} + +function summarizeWorkspaceImport(prefix: "Import" | "Imported", preview: WorkspaceImportPreview): string { + const parts = [ + countLabel("add", preview.summary.create), + countLabel("update", preview.summary.update + preview.summary.replace), + countLabel("remove", preview.summary.delete), + ].filter((part): part is string => Boolean(part)); + + return parts.length ? 
`${prefix} workspace config (${parts.join(", ")})` : `${prefix} workspace config (no changes)`; +} + +export function summarizeWorkspaceImportPreview(preview: WorkspaceImportPreview): string { + return summarizeWorkspaceImport("Import", preview); +} + +export function summarizeWorkspaceImportApplied(preview: WorkspaceImportPreview): string { + return summarizeWorkspaceImport("Imported", preview); +} diff --git a/apps/server/src/workspace-init.ts b/apps/server/src/workspace-init.ts new file mode 100644 index 0000000000..349790fb94 --- /dev/null +++ b/apps/server/src/workspace-init.ts @@ -0,0 +1,100 @@ +import { basename, join } from "node:path"; +import { readFile, writeFile } from "node:fs/promises"; + +import { ensureDir, exists } from "./utils.js"; +import { ApiError } from "./errors.js"; +import { openworkConfigPath, opencodeConfigPath } from "./workspace-files.js"; +import { readJsoncFile, writeJsoncFile } from "./jsonc.js"; + +const OPENWORK_AGENT = `--- +description: OpenWork default agent +mode: primary +temperature: 0.2 +--- + +You are OpenWork. + +Help the user work on files safely from this workspace. Prefer clear, practical steps. If required setup or credentials are missing, ask one targeted question and continue once provided. +`; + +type WorkspaceOpenworkConfig = { + version: number; + workspace?: { + name?: string | null; + createdAt?: number | null; + preset?: string | null; + } | null; + authorizedRoots: string[]; + reload?: { + auto?: boolean; + resume?: boolean; + } | null; +}; + +function normalizePreset(preset: string | null | undefined): string { + const trimmed = preset?.trim() ?? 
""; + if (!trimmed) return "starter"; + return trimmed; +} + +async function ensureWorkspaceOpenworkConfig(workspaceRoot: string, preset: string): Promise { + const path = openworkConfigPath(workspaceRoot); + if (await exists(path)) return; + const now = Date.now(); + const config: WorkspaceOpenworkConfig = { + version: 1, + workspace: { + name: basename(workspaceRoot) || "Workspace", + createdAt: now, + preset, + }, + authorizedRoots: [workspaceRoot], + reload: null, + }; + await ensureDir(join(workspaceRoot, ".opencode")); + await writeFile(path, JSON.stringify(config, null, 2) + "\n", "utf8"); +} + +async function ensureOpencodeConfig(workspaceRoot: string): Promise { + const path = opencodeConfigPath(workspaceRoot); + const { data } = await readJsoncFile>(path, { + $schema: "https://opencode.ai/config.json", + }); + const next: Record = data && typeof data === "object" && !Array.isArray(data) + ? { ...data } + : { $schema: "https://opencode.ai/config.json" }; + + if (typeof next.default_agent !== "string" || !next.default_agent.trim()) { + next.default_agent = "openwork"; + } + + await writeJsoncFile(path, next); +} + +async function ensureOpenworkAgent(workspaceRoot: string): Promise { + const agentsDir = join(workspaceRoot, ".opencode", "agents"); + const agentPath = join(agentsDir, "openwork.md"); + if (await exists(agentPath)) return; + await ensureDir(agentsDir); + await writeFile(agentPath, OPENWORK_AGENT.endsWith("\n") ? 
OPENWORK_AGENT : `${OPENWORK_AGENT}\n`, "utf8"); +} + +export async function ensureWorkspaceFiles(workspaceRoot: string, presetInput: string): Promise { + const preset = normalizePreset(presetInput); + if (!workspaceRoot.trim()) { + throw new ApiError(400, "invalid_workspace_path", "workspace path is required"); + } + await ensureDir(workspaceRoot); + await ensureOpencodeConfig(workspaceRoot); + await ensureOpenworkAgent(workspaceRoot); + await ensureWorkspaceOpenworkConfig(workspaceRoot, preset); +} + +export async function readRawOpencodeConfig(path: string): Promise<{ exists: boolean; content: string | null }> { + const hasFile = await exists(path); + if (!hasFile) { + return { exists: false, content: null }; + } + const content = await readFile(path, "utf8"); + return { exists: true, content }; +} diff --git a/apps/server/src/workspaces.ts b/apps/server/src/workspaces.ts new file mode 100644 index 0000000000..c65a8ed81c --- /dev/null +++ b/apps/server/src/workspaces.ts @@ -0,0 +1,72 @@ +import { createHash } from "node:crypto"; +import { basename, resolve } from "node:path"; +import type { WorkspaceConfig, WorkspaceInfo } from "./types.js"; + +function workspaceIdForKey(key: string): string { + const hash = createHash("sha256").update(key).digest("hex"); + return `ws_${hash.slice(0, 12)}`; +} + +export function workspaceIdForPath(path: string): string { + return workspaceIdForKey(path); +} + +export function workspaceIdForRemote(baseUrl: string, directory?: string | null): string { + const normalizedBaseUrl = baseUrl.trim(); + const normalizedDirectory = directory?.trim() ?? ""; + const key = normalizedDirectory + ? `remote::${normalizedBaseUrl}::${normalizedDirectory}` + : `remote::${normalizedBaseUrl}`; + return workspaceIdForKey(key); +} + +export function workspaceIdForOpenwork(hostUrl: string, workspaceId?: string | null): string { + const normalizedHostUrl = hostUrl.trim(); + const normalizedWorkspaceId = workspaceId?.trim() ?? 
""; + const key = normalizedWorkspaceId + ? `openwork::${normalizedHostUrl}::${normalizedWorkspaceId}` + : `openwork::${normalizedHostUrl}`; + return workspaceIdForKey(key); +} + +export function buildWorkspaceInfos( + workspaces: WorkspaceConfig[], + cwd: string, +): WorkspaceInfo[] { + return workspaces.map((workspace) => { + const rawPath = workspace.path?.trim() ?? ""; + const workspaceType = workspace.workspaceType ?? "local"; + const resolvedPath = rawPath ? resolve(cwd, rawPath) : ""; + const remoteType = workspace.remoteType; + const id = workspace.id?.trim() + || (workspaceType === "remote" + ? remoteType === "openwork" + ? workspaceIdForOpenwork(workspace.openworkHostUrl ?? workspace.baseUrl ?? "", workspace.openworkWorkspaceId) + : workspaceIdForRemote(workspace.baseUrl ?? "", workspace.directory) + : workspaceIdForPath(resolvedPath)); + const name = workspace.name?.trim() + || workspace.displayName?.trim() + || workspace.openworkWorkspaceName?.trim() + || basename(resolvedPath || workspace.directory?.trim() || workspace.baseUrl?.trim() || "Workspace"); + return { + id, + name, + path: resolvedPath, + preset: workspace.preset?.trim() || (workspaceType === "remote" ? 
"remote" : "starter"), + workspaceType, + remoteType, + baseUrl: workspace.baseUrl, + directory: workspace.directory, + displayName: workspace.displayName, + openworkHostUrl: workspace.openworkHostUrl, + openworkToken: workspace.openworkToken, + openworkWorkspaceId: workspace.openworkWorkspaceId, + openworkWorkspaceName: workspace.openworkWorkspaceName, + sandboxBackend: workspace.sandboxBackend, + sandboxRunId: workspace.sandboxRunId, + sandboxContainerName: workspace.sandboxContainerName, + opencodeUsername: workspace.opencodeUsername, + opencodePassword: workspace.opencodePassword, + }; + }); +} diff --git a/apps/server/tsconfig.json b/apps/server/tsconfig.json new file mode 100644 index 0000000000..d5509c4d5f --- /dev/null +++ b/apps/server/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "dist", + "rootDir": "src", + "strict": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "types": ["bun-types", "node"] + }, + "include": ["src"] +} diff --git a/apps/share/.gitignore b/apps/share/.gitignore new file mode 100644 index 0000000000..41d3ae8e49 --- /dev/null +++ b/apps/share/.gitignore @@ -0,0 +1,7 @@ +node_modules/ +.next/ +.vercel/ +.vercel +.env*.local +__tests__/ +*.tsbuildinfo diff --git a/apps/share/README.md b/apps/share/README.md new file mode 100644 index 0000000000..69da2b1aa2 --- /dev/null +++ b/apps/share/README.md @@ -0,0 +1,124 @@ +# OpenWork Share Service (Publisher) + +This is a Next.js publisher app for OpenWork "share link" bundles. + +It keeps the existing bundle APIs, but the public share surface now runs as a simple Next.js site backed by Vercel Blob. + +## Endpoints + +- `GET /` + - Human-friendly packaging page for OpenWork worker files. + - Supports drag/drop of skills, agents, commands, `opencode.json[c]`, and `openwork.json`. 
+ - Previews the inferred bundle and publishes a share link. + +- `POST /v1/bundles` + - Accepts JSON bundle payloads. + - Stores bytes in Vercel Blob. + - Returns `{ "url": "https://share.openworklabs.com/b/" }`. + +- `POST /v1/package` + - Accepts `{ files: [{ path, name?, content }], preview?: boolean }`. + - Parses supported OpenWork files into the smallest useful bundle shape. + - Returns preview metadata when `preview` is `true`. + - Publishes the generated bundle and returns the share URL otherwise. + +- `GET /b/:id` + - Returns an HTML share page by default for browser requests. + - Includes an **Open in app** action that opens `openwork://import-bundle` with: + - `ow_bundle=` + - `ow_intent=new_worker` (desktop OpenWork converts single-skill bundles into a destination picker before import) + - `ow_source=share_service` + - Returns raw JSON for API/programmatic requests: + - send `Accept: application/json`, or + - append `?format=json`. + - The canonical raw endpoint is `/b/:id/data`. + - Supports `/b/:id/data?download=1` and the legacy `?format=json&download=1` compatibility path. + +## Bundle Types + +- `skill` + - A single skill install payload. +- `skills-set` + - A full skills pack (multiple skills) exported from a worker. + +## Packager input support + +- Skill markdown from `.opencode/skills//SKILL.md` +- Agent markdown from `.opencode/agents/*.md` +- Tool definitions from `.opencode/tools/*` +- Command markdown from `.opencode/commands/*.md` +- `opencode.json` / `opencode.jsonc` (portable project keys only: `agent`, `command`, `instructions`, `mcp`, `permission`, `plugin`, `share`, `tools`, `watcher`) +- `openwork.json` + +The packager rejects files that appear to contain secrets in shareable config. + +## Required Environment Variables + +- `BLOB_READ_WRITE_TOKEN` + - Vercel Blob token with read/write permissions. 
+ +## Optional Environment Variables + +- `PUBLIC_BASE_URL` + - Default: `https://share.openworklabs.com` + - Used to construct the returned share URL. + +- `MAX_BYTES` + - Default: `262144` (256KB) + - Hard upload limit. + +- `OPENWORK_PUBLISHER_ALLOWED_ORIGINS` + - Optional comma-separated browser origins allowed to publish bundles. + - Defaults include the share origin, the hosted OpenWork app origin, and common local dev origins. + +- `LOCAL_BLOB_DIR` + - Optional local filesystem storage root for bundle JSON. + - When `BLOB_READ_WRITE_TOKEN` is unset in local/dev mode, the service falls back to local file storage automatically. + +## Local development + +For local testing you can use: + +```bash +pnpm install +pnpm --dir apps/share dev +``` + +Open `http://localhost:3000`. + +Without a `BLOB_READ_WRITE_TOKEN`, local development now stores bundles on disk in a local dev blob directory so publishing works out of the box. + +## Deploy + +Recommended project settings: + +- Root directory: `apps/share` +- Framework preset: Next.js +- Build command: `next build` +- Output directory: `.next` +- Install command: `pnpm install --frozen-lockfile` +- Enable Vercel BotID for the project and keep the bundle routes protected in `app/layout.tsx`. + +## Tests + +```bash +pnpm --dir apps/share test +``` + +## Quick checks + +```bash +# Human-friendly page +curl -i "http://localhost:3000/b/" -H "Accept: text/html" + +# Machine-readable payload (OpenWork parser path) +curl -i "http://localhost:3000/b//data" + +# Legacy compatibility path +curl -i "http://localhost:3000/b/?format=json" +``` + +## Notes + +- Links are public and unguessable (no auth, no encryption). +- Do not publish secrets in bundles. 
diff --git a/apps/share/app/api/health/route.ts b/apps/share/app/api/health/route.ts new file mode 100644 index 0000000000..5d885a53f2 --- /dev/null +++ b/apps/share/app/api/health/route.ts @@ -0,0 +1,5 @@ +export const runtime = "nodejs"; + +export function GET() { + return Response.json({ ok: true }); +} diff --git a/apps/share/app/api/v1/bundles/route.ts b/apps/share/app/api/v1/bundles/route.ts new file mode 100644 index 0000000000..5f63bf315f --- /dev/null +++ b/apps/share/app/api/v1/bundles/route.ts @@ -0,0 +1,89 @@ +import { storeBundleJson } from "../../../../server/_lib/blob-store.ts"; +import { buildCorsHeaders, rateLimitPublishRequest, validateTrustedOrigin, verifyShareBotProtection } from "../../../../server/_lib/publish-security.ts"; +import { buildBundleUrls, getEnv, validateBundlePayload } from "../../../../server/_lib/share-utils.ts"; +import { buildRequestLike } from "../../../../server/_lib/request-like.ts"; + +export const runtime = "nodejs"; + +function formatPublishError(error: unknown): string { + const message = error instanceof Error ? 
error.message : "Blob put failed"; + if (message.includes("BLOB_READ_WRITE_TOKEN") || message.includes("No token found")) { + return "Publishing requires BLOB_READ_WRITE_TOKEN in the server environment."; + } + return message; +} + +function jsonResponse(body: unknown, request: Request, status = 200): Response { + return new Response(JSON.stringify(body), { + status, + headers: { + ...buildCorsHeaders(request), + "Content-Type": "application/json" + } + }); +} + +export function OPTIONS(request: Request) { + return new Response(null, { + status: 204, + headers: buildCorsHeaders(request) + }); +} + +export async function POST(request: Request) { + const originCheck = validateTrustedOrigin(request); + if (!originCheck.ok) { + return jsonResponse({ message: originCheck.message }, request, originCheck.status); + } + + const rateLimit = rateLimitPublishRequest(request); + if (!rateLimit.ok) { + return new Response(JSON.stringify({ message: "Publishing is temporarily rate limited." }), { + status: 429, + headers: { + ...buildCorsHeaders(request), + "Content-Type": "application/json", + "X-Retry-After": String(rateLimit.retryAfterSeconds), + }, + }); + } + + const botProtection = await verifyShareBotProtection(request); + if (!botProtection.ok) { + return jsonResponse({ message: botProtection.message }, request, botProtection.status); + } + + const maxBytes = Number.parseInt(getEnv("MAX_BYTES", "262144"), 10); + const contentType = String(request.headers.get("content-type") ?? 
"").toLowerCase(); + if (!contentType.includes("application/json")) { + return jsonResponse({ message: "Expected application/json" }, request, 415); + } + + const rawJson = await request.text(); + if (!rawJson) { + return jsonResponse({ message: "Body is required" }, request, 400); + } + + if (Buffer.byteLength(rawJson, "utf8") > maxBytes) { + return jsonResponse({ message: "Bundle exceeds upload limit", maxBytes }, request, 413); + } + + const validation = validateBundlePayload(rawJson); + if (!validation.ok) { + return jsonResponse({ message: validation.message }, request, 422); + } + + try { + const { id } = await storeBundleJson(rawJson); + const urls = buildBundleUrls( + buildRequestLike({ + headers: request.headers + }), + id + ); + + return jsonResponse({ url: urls.shareUrl }, request); + } catch (error) { + return jsonResponse({ message: formatPublishError(error) }, request, 500); + } +} diff --git a/apps/share/app/api/v1/package/route.ts b/apps/share/app/api/v1/package/route.ts new file mode 100644 index 0000000000..13500f3af1 --- /dev/null +++ b/apps/share/app/api/v1/package/route.ts @@ -0,0 +1,101 @@ +import { storeBundleJson } from "../../../../server/_lib/blob-store.ts"; +import { packageOpenworkFiles } from "../../../../server/_lib/package-openwork-files.ts"; +import { buildCorsHeaders, rateLimitPublishRequest, validateTrustedOrigin, verifyShareBotProtection } from "../../../../server/_lib/publish-security.ts"; +import { buildBundleUrls, getEnv } from "../../../../server/_lib/share-utils.ts"; +import { buildRequestLike } from "../../../../server/_lib/request-like.ts"; + +export const runtime = "nodejs"; + +function formatPublishError(error: unknown): string { + const message = error instanceof Error ? 
error.message : "Failed to package files"; + if (message.includes("BLOB_READ_WRITE_TOKEN") || message.includes("No token found")) { + return "Publishing requires BLOB_READ_WRITE_TOKEN in the server environment."; + } + return message; +} + +function jsonResponse(body: unknown, request: Request, status = 200): Response { + return new Response(JSON.stringify(body), { + status, + headers: { + ...buildCorsHeaders(request), + "Content-Type": "application/json" + } + }); +} + +export function OPTIONS(request: Request) { + return new Response(null, { + status: 204, + headers: buildCorsHeaders(request) + }); +} + +export async function POST(request: Request) { + const originCheck = validateTrustedOrigin(request); + if (!originCheck.ok) { + return jsonResponse({ message: originCheck.message }, request, originCheck.status); + } + + const rateLimit = rateLimitPublishRequest(request); + if (!rateLimit.ok) { + return new Response(JSON.stringify({ message: "Publishing is temporarily rate limited." }), { + status: 429, + headers: { + ...buildCorsHeaders(request), + "Content-Type": "application/json", + "X-Retry-After": String(rateLimit.retryAfterSeconds), + }, + }); + } + + const botProtection = await verifyShareBotProtection(request); + if (!botProtection.ok) { + return jsonResponse({ message: botProtection.message }, request, botProtection.status); + } + + const maxBytes = Number.parseInt(getEnv("MAX_BYTES", "262144"), 10); + const contentType = String(request.headers.get("content-type") ?? 
"").toLowerCase(); + if (!contentType.includes("application/json")) { + return jsonResponse({ message: "Expected application/json" }, request, 415); + } + + const raw = await request.text(); + if (!raw) { + return jsonResponse({ message: "Body is required" }, request, 400); + } + + if (Buffer.byteLength(raw, "utf8") > maxBytes) { + return jsonResponse({ message: "Package request exceeds upload limit", maxBytes }, request, 413); + } + + let body: { preview?: boolean; [key: string]: unknown }; + try { + body = JSON.parse(raw); + } catch { + return jsonResponse({ message: "Invalid JSON" }, request, 422); + } + + try { + const packaged = packageOpenworkFiles(body); + if (body?.preview) { + return jsonResponse(packaged, request); + } + + const { id } = await storeBundleJson(JSON.stringify(packaged.bundle)); + const urls = buildBundleUrls( + buildRequestLike({ + headers: request.headers + }), + id + ); + + return jsonResponse({ + ...packaged, + url: urls.shareUrl, + id + }, request); + } catch (error) { + return jsonResponse({ message: formatPublishError(error) }, request, 422); + } +} diff --git a/apps/share/app/b/[id]/data/route.ts b/apps/share/app/b/[id]/data/route.ts new file mode 100644 index 0000000000..e8114097c1 --- /dev/null +++ b/apps/share/app/b/[id]/data/route.ts @@ -0,0 +1,38 @@ +import { fetchBundleJsonById } from "../../../../server/_lib/blob-store.ts"; +import { wantsDownload } from "../../../../server/_lib/share-utils.ts"; +import { buildRequestLike } from "../../../../server/_lib/request-like.ts"; +import type { NextRequest } from "next/server"; + +export const runtime = "nodejs"; + +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const routeParams = await params; + const id = String(routeParams?.id ?? 
"").trim(); + if (!id) { + return Response.json({ message: "id is required" }, { status: 400 }); + } + + try { + const { blob, rawBuffer } = await fetchBundleJsonById(id); + const requestLike = buildRequestLike({ + headers: request.headers, + searchParams: request.nextUrl.searchParams + }); + const responseHeaders = new Headers({ + Vary: "Accept", + "Cache-Control": "public, max-age=3600", + "Content-Type": blob.contentType || "application/json" + }); + + if (wantsDownload(requestLike)) { + responseHeaders.set("Content-Disposition", `attachment; filename="openwork-bundle-${id}.json"`); + } + + return new Response(rawBuffer as unknown as BodyInit, { + status: 200, + headers: responseHeaders + }); + } catch { + return Response.json({ message: "Not found" }, { status: 404 }); + } +} diff --git a/apps/share/app/b/[id]/page.tsx b/apps/share/app/b/[id]/page.tsx new file mode 100644 index 0000000000..362b1451ef --- /dev/null +++ b/apps/share/app/b/[id]/page.tsx @@ -0,0 +1,124 @@ +import type { Metadata } from "next"; +import { headers } from "next/headers"; +import { redirect } from "next/navigation"; + +import ShareBundlePage from "../../../components/share-bundle-page"; +import { getBundlePageProps } from "../../../server/b/get-bundle-page-props.ts"; +import { getGithubStars } from "../../../server/_lib/github-stars.ts"; +import { buildRequestLike } from "../../../server/_lib/request-like.ts"; + +async function loadBundlePageProps(id: string) { + const requestHeaders = await headers(); + return getBundlePageProps({ + id, + requestLike: buildRequestLike({ headers: requestHeaders }) + }); +} + +type BundlePageSearchParams = Record; + +function firstQueryValue(value: string | string[] | undefined): string { + return Array.isArray(value) ? String(value[0] ?? "") : String(value ?? 
""); +} + +function wantsJsonBundleResponse(requestHeaders: Headers, searchParams: BundlePageSearchParams): boolean { + const format = firstQueryValue(searchParams.format).trim().toLowerCase(); + if (format === "json") return true; + + const accept = String(requestHeaders.get("accept") ?? "").toLowerCase(); + return accept.includes("application/json") && !accept.includes("text/html"); +} + +function buildBundleDataUrl(id: string, searchParams: BundlePageSearchParams): string { + const query = new URLSearchParams(); + const download = firstQueryValue(searchParams.download).trim(); + if (download) { + query.set("download", download); + } + + const suffix = query.toString(); + return `/b/${encodeURIComponent(id)}/data${suffix ? `?${suffix}` : ""}`; +} + +export async function generateMetadata({ params }: { params: Promise<{ id: string }> }): Promise { + const routeParams = await params; + const props = await loadBundlePageProps(routeParams?.id); + const pageTitle = props.missing ? "SKILL.md not found" : props.title; + const pageDescription = props.missing + ? "This share link does not exist anymore, or the bundle id is invalid." + : props.description; + + return { + title: pageTitle, + description: pageDescription, + alternates: { + canonical: props.canonicalUrl + }, + robots: props.missing + ? { + index: false, + follow: false, + googleBot: { + index: false, + follow: false + } + } + : undefined, + openGraph: { + type: "website", + siteName: "OpenWork Share", + title: pageTitle, + description: pageDescription, + url: props.canonicalUrl, + images: [ + { + url: props.ogImageUrls?.byVariant.facebook || props.ogImageUrl, + width: 1200, + height: 630, + alt: `${pageTitle} bundle preview` + } + ] + }, + twitter: { + card: "summary_large_image", + title: pageTitle, + description: pageDescription, + images: [ + { + url: props.twitterImageUrl || props.ogImageUrl, + alt: `${pageTitle} bundle preview` + } + ] + }, + other: props.missing + ? 
undefined + : { + "openwork:bundle-id": props.id!, + "openwork:bundle-type": props.bundleType!, + "openwork:schema-version": props.schemaVersion!, + "openwork:open-in-app-url": props.openInAppDeepLink! + } + }; +} + +export default async function BundlePage({ + params, + searchParams, +}: { + params: Promise<{ id: string }>; + searchParams: Promise; +}) { + const [routeParams, resolvedSearchParams, requestHeaders] = await Promise.all([params, searchParams, headers()]); + const id = String(routeParams?.id ?? "").trim(); + + if (id && wantsJsonBundleResponse(requestHeaders, resolvedSearchParams ?? {})) { + redirect(buildBundleDataUrl(id, resolvedSearchParams ?? {})); + } + + const props = await getBundlePageProps({ + id, + requestLike: buildRequestLike({ headers: requestHeaders }) + }); + const stars = await getGithubStars(); + return ; +} diff --git a/apps/share/app/layout.tsx b/apps/share/app/layout.tsx new file mode 100644 index 0000000000..68f6f99bf8 --- /dev/null +++ b/apps/share/app/layout.tsx @@ -0,0 +1,80 @@ +import "../styles/globals.css"; + +import type { Metadata } from "next"; +import { Inter, JetBrains_Mono } from "next/font/google"; +import Script from "next/script"; +import type { ReactNode } from "react"; +import { BotIdClient } from "botid/client"; + +import { DEFAULT_PUBLIC_BASE_URL } from "../server/_lib/share-utils.ts"; + +const inter = Inter({ + subsets: ["latin"], + display: "swap", + variable: "--font-inter" +}); + +const jetbrainsMono = JetBrains_Mono({ + subsets: ["latin"], + display: "swap", + variable: "--font-mono" +}); + +export const metadata: Metadata = { + metadataBase: new URL(DEFAULT_PUBLIC_BASE_URL), + title: { + default: "OpenWork Share", + template: "%s - OpenWork Share" + }, + description: "Publish OpenWork worker packages and shareable import links.", + icons: { icon: "/openwork-mark.svg" }, + openGraph: { + type: "website", + siteName: "OpenWork Share", + }, + twitter: { + card: "summary_large_image", + site: "@getopenwork", + 
}, +}; + +const defaultPosthogKey = "phc_4YnPTlDVYPjgwKvLuNxhbHjV5kadgvd7XLzVHWnCXAI"; +const defaultPosthogHost = "https://us.i.posthog.com"; +const posthogKey = + process.env.NEXT_PUBLIC_POSTHOG_KEY?.trim() || + process.env.NEXT_PUBLIC_POSTHOG_API_KEY?.trim() || + defaultPosthogKey; +const posthogHost = (process.env.NEXT_PUBLIC_POSTHOG_HOST ?? defaultPosthogHost).trim(); + +const posthogBootstrap = posthogKey + ? `!function(t,e){var o,n,p,r;e.__SV||(window.posthog&&window.posthog.__loaded)||(window.posthog=e,e._i=[],e.init=function(i,s,a){function g(t,e){var o=e.split(".");2==o.length&&(t=t[o[0]],e=o[1]),t[e]=function(){t.push([e].concat(Array.prototype.slice.call(arguments,0)))}}(p=t.createElement("script")).type="text/javascript",p.crossOrigin="anonymous",p.async=!0,p.src=s.api_host.replace(".i.posthog.com","-assets.i.posthog.com")+"/static/array.js",(r=t.getElementsByTagName("script")[0]).parentNode.insertBefore(p,r);var u=e;for(void 0!==a?u=e[a]=[]:a="posthog",u.people=u.people||[],u.toString=function(t){var e="posthog";return"posthog"!==a&&(e+="."+a),t||(e+=" (stub)"),e},u.people.toString=function(){return u.toString(1)+".people (stub)"},o="init capture identify alias reset register unregister setPersonProperties".split(" "),n=0;n + + + {posthogBootstrap ? 
( + skill", + description: "Install me", + trigger: "daily", + content: "# Skill\nHello", + }); + + const html = renderBundlePage({ + id: "01TEST", + rawJson, + req: makeReq({ accept: "text/html", host: "share.openworklabs.com" }), + }); + + assert.match(html, /data-openwork-share="true"/); + assert.match(html, /data-openwork-bundle-type="skill"/); + assert.match(html, /meta name="openwork:bundle-id" content="01TEST"/); + assert.match(html, /\/b\/01TEST\/data/); + assert.match(html, /openwork:\/\/import-bundle\?/); + assert.match(html, /ow_bundle=https%3A%2F%2Fshare\.openworklabs\.com%2Fb%2F01TEST/); + assert.match(html, /ow_intent=new_worker/); + assert.match(html, /ow_source=share_service/); + assert.match(html, /id="openwork-bundle-json" type="application\/json"/); + assert.match(html, /demo \\u003c\/script\\u003e skill/); + assert.doesNotMatch(html, /Open in app to choose where to add this skill\./); + assert.doesNotMatch(html, /Bundle details/); + assert.doesNotMatch(html, /Raw endpoints/); + assert.match(html, /skill\.md/); + assert.match(html, /Open in OpenWork app/); + assert.match(html, /Open in an OpenWork den/); + assert.doesNotMatch(html, /Open in web app/); + assert.doesNotMatch(html, /Copy share link/); +}); diff --git a/apps/share/server/b/render-bundle-page.ts b/apps/share/server/b/render-bundle-page.ts new file mode 100644 index 0000000000..1522d1f148 --- /dev/null +++ b/apps/share/server/b/render-bundle-page.ts @@ -0,0 +1,473 @@ +import { + OPENWORK_DOWNLOAD_URL, + SHARE_EASE, + buildBundleNarrative, + buildBundleUrls, + buildOgImageUrls, + buildOpenInAppUrls, + collectBundleItems, + escapeHtml, + escapeJsonForScript, + humanizeType, + parseBundle, + wantsDownload, +} from "../_lib/share-utils.ts"; +import type { RequestLike } from "../_lib/types.ts"; + +export { buildBundleUrls, wantsDownload } from "../_lib/share-utils.ts"; + +export function renderBundlePage({ id, rawJson, req }: { id: string; rawJson: string; req: RequestLike }): string { + 
const bundle = parseBundle(rawJson); + const urls = buildBundleUrls(req, id); + const ogImageUrls = buildOgImageUrls(req, id); + const ogImageUrl = ogImageUrls.default; + const { openInAppDeepLink } = buildOpenInAppUrls(urls.shareUrl, { + label: bundle.name || "Shared worker package", + }); + + const schemaVersion = bundle.schemaVersion == null ? "unknown" : String(bundle.schemaVersion); + const typeLabel = humanizeType(bundle.type); + const title = bundle.name || `OpenWork ${typeLabel}`; + const description = bundle.description || buildBundleNarrative(bundle); + const items = collectBundleItems(bundle, 8); + const compactItem = bundle.type === "skill" ? "skill.md" : items[0]?.name || "OpenWork bundle"; + + return ` + + + + + ${escapeHtml(title)} - OpenWork Share + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+

${escapeHtml(title)} ready

+

${escapeHtml(description)}

+ +
+ +
+
+
+
+
+
+
+
+
OpenWork
+
+
+
+

Skills:

+
+
${escapeHtml(compactItem)}
+
+
+
+
+
+
+ +
+
+
+
01Open the bundle in OpenWork
+
02Choose the destination worker
+
03Happy OpenWorking!
+
+
+
+ +
+ + + + +`; +} diff --git a/apps/share/server/health.ts b/apps/share/server/health.ts new file mode 100644 index 0000000000..332c408581 --- /dev/null +++ b/apps/share/server/health.ts @@ -0,0 +1,7 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; + +export default function handler(_req: IncomingMessage, res: ServerResponse): void { + res.setHeader("Content-Type", "application/json"); + res.statusCode = 200; + res.end(JSON.stringify({ ok: true })); +} diff --git a/apps/share/server/og/[id].ts b/apps/share/server/og/[id].ts new file mode 100644 index 0000000000..f6c3b2a647 --- /dev/null +++ b/apps/share/server/og/[id].ts @@ -0,0 +1,46 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; + +import { fetchBundleJsonById } from "../_lib/blob-store.ts"; +import { parseOgImageVariant } from "../_lib/og-image-variants.ts"; +import { renderBundleOgImage, renderRootOgImage } from "../_lib/render-og-image.ts"; +import { setCors } from "../_lib/share-utils.ts"; + +interface LegacyApiRequest extends IncomingMessage { + method?: string; + query?: Record; +} + +interface LegacyApiResponse extends ServerResponse { + status(code: number): LegacyApiResponse; + json(body: unknown): void; + send(body: string): void; +} + +export default async function handler(req: LegacyApiRequest, res: LegacyApiResponse): Promise { + setCors(res, { methods: "GET,OPTIONS", headers: "Content-Type,Accept" }); + if (req.method === "OPTIONS") { + res.status(204).end(); + return; + } + if (req.method !== "GET") { + res.status(405).json({ message: "Method not allowed" }); + return; + } + + const id = String(req.query?.id ?? "root").trim() || "root"; + const variant = parseOgImageVariant(req.query?.variant); + res.setHeader("Content-Type", "image/svg+xml; charset=utf-8"); + res.setHeader("Cache-Control", id === "root" ? 
"public, max-age=3600" : "public, max-age=3600, stale-while-revalidate=86400"); + + if (id === "root") { + res.status(200).send(renderRootOgImage(variant)); + return; + } + + try { + const { rawJson } = await fetchBundleJsonById(id); + res.status(200).send(renderBundleOgImage({ id, rawJson, variant })); + } catch { + res.status(404).send(renderRootOgImage(variant)); + } +} diff --git a/apps/share/server/v1/bundles.ts b/apps/share/server/v1/bundles.ts new file mode 100644 index 0000000000..7dd9c0d9ff --- /dev/null +++ b/apps/share/server/v1/bundles.ts @@ -0,0 +1,113 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; + +import { storeBundleJson } from "../_lib/blob-store.ts"; +import { buildCanonicalRequest, buildRequestLike } from "../_lib/request-like.ts"; +import { buildCorsHeaders, rateLimitPublishRequest, validateTrustedOrigin, verifyShareBotProtection } from "../_lib/publish-security.ts"; +import { buildBundleUrls, getEnv, readBody, validateBundlePayload } from "../_lib/share-utils.ts"; + +interface LegacyApiRequest extends IncomingMessage { + method?: string; + headers: Record; +} + +interface LegacyApiResponse extends ServerResponse { + status(code: number): LegacyApiResponse; + json(body: unknown): void; +} + +function formatPublishError(error: unknown): string { + const message = error instanceof Error ? 
error.message : "Blob put failed"; + if (message.includes("BLOB_READ_WRITE_TOKEN") || message.includes("No token found")) { + return "Publishing requires BLOB_READ_WRITE_TOKEN in the server environment."; + } + return message; +} + +function applyHeaders(res: LegacyApiResponse, headers: Record): void { + for (const [name, value] of Object.entries(headers)) { + res.setHeader(name, value); + } +} + +function json(res: LegacyApiResponse, body: unknown, status = 200, headers: Record = {}): void { + applyHeaders(res, { + ...headers, + "Content-Type": "application/json", + }); + res.status(status).end(JSON.stringify(body)); +} + +export default async function handler(req: LegacyApiRequest, res: LegacyApiResponse): Promise { + const request = buildCanonicalRequest({ + pathname: "/v1/bundles", + method: req.method, + headers: req.headers, + }); + + applyHeaders(res, buildCorsHeaders(request)); + if (req.method === "OPTIONS") { + res.status(204).end(); + return; + } + + const originCheck = validateTrustedOrigin(request); + if (!originCheck.ok) { + json(res, { message: originCheck.message }, originCheck.status); + return; + } + + const rateLimit = rateLimitPublishRequest(request); + if (!rateLimit.ok) { + json( + res, + { message: "Publishing is temporarily rate limited." }, + 429, + { "X-Retry-After": String(rateLimit.retryAfterSeconds) }, + ); + return; + } + + const botProtection = await verifyShareBotProtection(request); + if (!botProtection.ok) { + json(res, { message: botProtection.message }, botProtection.status); + return; + } + + if (req.method !== "POST") { + json(res, { message: "Method not allowed" }, 405); + return; + } + + const maxBytes = Number.parseInt(getEnv("MAX_BYTES", "5242880"), 10); + + const contentType = String(req.headers["content-type"] ?? 
"").toLowerCase(); + if (!contentType.includes("application/json")) { + json(res, { message: "Expected application/json" }, 415); + return; + } + + const raw = await readBody(req); + if (!raw || raw.length === 0) { + json(res, { message: "Body is required" }, 400); + return; + } + if (raw.length > maxBytes) { + json(res, { message: "Bundle exceeds upload limit", maxBytes }, 413); + return; + } + + const rawJson = raw.toString("utf8"); + const validation = validateBundlePayload(rawJson); + if (!validation.ok) { + json(res, { message: validation.message }, 422); + return; + } + + try { + const { id } = await storeBundleJson(rawJson); + const urls = buildBundleUrls(buildRequestLike({ headers: request.headers }), id); + json(res, { url: urls.shareUrl }); + } catch (e) { + json(res, { message: formatPublishError(e) }, 500); + } +} diff --git a/apps/share/server/v1/package.ts b/apps/share/server/v1/package.ts new file mode 100644 index 0000000000..c9bb88debd --- /dev/null +++ b/apps/share/server/v1/package.ts @@ -0,0 +1,124 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; + +import { storeBundleJson } from "../_lib/blob-store.ts"; +import { packageOpenworkFiles } from "../_lib/package-openwork-files.ts"; +import { buildCanonicalRequest, buildRequestLike } from "../_lib/request-like.ts"; +import { buildCorsHeaders, rateLimitPublishRequest, validateTrustedOrigin, verifyShareBotProtection } from "../_lib/publish-security.ts"; +import { buildBundleUrls, getEnv, readBody } from "../_lib/share-utils.ts"; + +interface LegacyApiRequest extends IncomingMessage { + method?: string; + headers: Record; +} + +interface LegacyApiResponse extends ServerResponse { + status(code: number): LegacyApiResponse; + json(body: unknown): void; +} + +function formatPublishError(error: unknown): string { + const message = error instanceof Error ? 
error.message : "Failed to package files"; + if (message.includes("BLOB_READ_WRITE_TOKEN") || message.includes("No token found")) { + return "Publishing requires BLOB_READ_WRITE_TOKEN in the server environment."; + } + return message; +} + +function applyHeaders(res: LegacyApiResponse, headers: Record): void { + for (const [name, value] of Object.entries(headers)) { + res.setHeader(name, value); + } +} + +function json(res: LegacyApiResponse, body: unknown, status = 200, headers: Record = {}): void { + applyHeaders(res, { + ...headers, + "Content-Type": "application/json", + }); + res.status(status).end(JSON.stringify(body)); +} + +export default async function handler(req: LegacyApiRequest, res: LegacyApiResponse): Promise { + const request = buildCanonicalRequest({ + pathname: "/v1/package", + method: req.method, + headers: req.headers, + }); + + applyHeaders(res, buildCorsHeaders(request)); + if (req.method === "OPTIONS") { + res.status(204).end(); + return; + } + + const originCheck = validateTrustedOrigin(request); + if (!originCheck.ok) { + json(res, { message: originCheck.message }, originCheck.status); + return; + } + + const rateLimit = rateLimitPublishRequest(request); + if (!rateLimit.ok) { + json( + res, + { message: "Publishing is temporarily rate limited." }, + 429, + { "X-Retry-After": String(rateLimit.retryAfterSeconds) }, + ); + return; + } + + const botProtection = await verifyShareBotProtection(request); + if (!botProtection.ok) { + json(res, { message: botProtection.message }, botProtection.status); + return; + } + + if (req.method !== "POST") { + json(res, { message: "Method not allowed" }, 405); + return; + } + + const maxBytes = Number.parseInt(getEnv("MAX_BYTES", "5242880"), 10); + const contentType = String(req.headers["content-type"] ?? 
"").toLowerCase(); + if (!contentType.includes("application/json")) { + json(res, { message: "Expected application/json" }, 415); + return; + } + + const raw = await readBody(req); + if (!raw || raw.length === 0) { + json(res, { message: "Body is required" }, 400); + return; + } + if (raw.length > maxBytes) { + json(res, { message: "Package request exceeds upload limit", maxBytes }, 413); + return; + } + + let body: { preview?: boolean; [key: string]: unknown }; + try { + body = JSON.parse(raw.toString("utf8")); + } catch { + json(res, { message: "Invalid JSON" }, 422); + return; + } + + try { + const packaged = packageOpenworkFiles(body); + if (body?.preview) { + json(res, packaged); + return; + } + + const { id } = await storeBundleJson(JSON.stringify(packaged.bundle)); + const urls = buildBundleUrls(buildRequestLike({ headers: request.headers }), id); + json(res, { + ...packaged, + url: urls.shareUrl, + id, + }); + } catch (error) { + json(res, { message: formatPublishError(error) }, 422); + } +} diff --git a/apps/share/styles/globals.css b/apps/share/styles/globals.css new file mode 100644 index 0000000000..4bbe593ec4 --- /dev/null +++ b/apps/share/styles/globals.css @@ -0,0 +1,1976 @@ +@font-face { + font-family: "FK Raster Roman Compact Smooth"; + src: url("https://openworklabs.com/fonts/FKRasterRomanCompact-Smooth.woff2") format("woff2"); + font-weight: 400; + font-style: normal; + font-display: swap; +} + +:root { + color-scheme: light; + --ow-bg: #f6f9fc; + --ow-ink: #011627; + --ow-muted: #5f6b7a; + --ow-card: rgba(255, 255, 255, 0.78); + --ow-card-strong: rgba(255, 255, 255, 0.94); + --ow-border: rgba(255, 255, 255, 0.76); + --ow-border-soft: rgba(148, 163, 184, 0.18); + --ow-shadow: 0 24px 70px -28px rgba(15, 23, 42, 0.18); + --ow-shadow-soft: 0 20px 50px -24px rgba(15, 23, 42, 0.12); + --ow-shadow-strong: 0 28px 80px -28px rgba(15, 23, 42, 0.26); + --ow-primary: #011627; + --ow-primary-hover: rgb(110, 110, 110); + --ow-ease: cubic-bezier(0.31, 0.325, 
0, 0.92); + --ow-skill: linear-gradient(135deg, #f97316, #facc15); + --ow-agent: linear-gradient(135deg, #1d4ed8, #60a5fa); + --ow-mcp: linear-gradient(135deg, #0f766e, #2dd4bf); + --ow-command: linear-gradient(135deg, #7c3aed, #c084fc); + --ow-mono: var(--font-mono, ui-monospace), SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; +} + +* { + box-sizing: border-box; +} + +html, +body, +#__next { + min-height: 100%; +} + +body { + margin: 0; + font-family: var(--font-inter, Inter), "Segoe UI", "Helvetica Neue", sans-serif; + color: var(--ow-ink); + background-color: #f6f9fc; + overflow-x: hidden; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +body::before { + display: none; +} + +.grain-container { + position: absolute; + inset: 0; + overflow: hidden; +} + +a, +button, +input, +textarea { + font: inherit; +} + +a { + color: inherit; +} + +.mono { + font-family: var(--ow-mono); +} + +.shell { + position: relative; + z-index: 1; + width: min(100%, 1180px); + margin: 0 auto; + padding: 28px 18px 54px; +} + +.nav { + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; + margin-bottom: 28px; +} + +.brand, +.nav-links a, +.button-primary, +.button-secondary, + .hero-artifact, + .surface-shell, + .surface-soft, + .app-window, + .artifact-window, + .simple-app, + .drop-zone, + .selection-item, + .story-card, + .result-card, + .status-card, + .share-card, + .share-actions-bar, + .share-link-bar { + transition: + background-color 300ms var(--ow-ease), + border-color 300ms var(--ow-ease), + color 300ms var(--ow-ease), + box-shadow 300ms var(--ow-ease), + transform 300ms var(--ow-ease); +} + +.brand { + display: inline-flex; + align-items: center; + gap: 10px; + margin: 0; + padding: 0; + text-decoration: none; + color: var(--ow-ink); + font-size: 1.2rem; + font-weight: 600; + letter-spacing: -0.02em; + transition: opacity 300ms var(--ow-ease); +} + +.brand:hover { + 
opacity: 0.8; +} + +.brand-mark { + display: none; +} + +.nav-links, +.nav-actions, +.button-row, +.hero-actions { + display: flex; + flex-wrap: wrap; + gap: 12px; + align-items: center; +} + +.button-secondary { + display: inline-flex; + align-items: center; + justify-content: center; + gap: 10px; + min-height: 44px; + padding: 0 18px; + border-radius: 999px; + text-decoration: none; + background: rgba(255, 255, 255, 0.66); + border: 1px solid rgba(255, 255, 255, 0.82); + box-shadow: 0 18px 44px -34px rgba(15, 23, 42, 0.28); + backdrop-filter: blur(12px); + color: var(--ow-ink); +} + +.share-github-button { + background: #ffffff; +} + +.nav-links a { + display: inline-flex; + align-items: center; + justify-content: center; + gap: 10px; + min-height: 44px; + padding: 0 18px; + text-decoration: none; + color: var(--ow-ink); + font-weight: 500; + transition: color 300ms var(--ow-ease); +} + +.nav-links a:hover { + color: var(--ow-primary-hover); +} + +.button-secondary:hover { + background: rgb(242, 242, 242); + box-shadow: + rgba(0, 0, 0, 0.06) 0 0 0 1px, + rgba(0, 0, 0, 0.04) 0 1px 2px 0, + rgba(0, 0, 0, 0.04) 0 2px 4px 0; +} + +.button-primary { + display: inline-flex; + align-items: center; + justify-content: center; + gap: 10px; + min-height: 48px; + padding: 0 22px; + border-radius: 999px; + border: none; + cursor: pointer; + text-decoration: none; + color: #fff; + background: var(--ow-primary); + box-shadow: 0 22px 46px -28px rgba(1, 22, 39, 0.7); + font-weight: 600; +} + +.button-primary:hover { + background: var(--ow-primary-hover); + transform: translateY(-1px); +} + +.button-primary:disabled, +.button-secondary:disabled { + cursor: not-allowed; + opacity: 0.58; + transform: none; +} + +.hero-layout { + display: grid; + gap: 24px; + grid-template-columns: minmax(0, 1.05fr) minmax(320px, 0.95fr); + align-items: stretch; +} + +.hero-layout-share { + grid-template-columns: minmax(0, 1fr); + align-items: start; +} + +.hero-copy, +.simple-app, +.paste-panel, 
+.result-card, +.status-card { + display: grid; + gap: 18px; +} + +.hero-copy h1, +.status-card h1 { + margin: 0; + font-size: clamp(3rem, 8vw, 5rem); + line-height: 0.94; + letter-spacing: -0.06em; +} + +.hero-copy h1 em, +.status-card h1 em { + font-family: "FK Raster Roman Compact Smooth", Georgia, serif; + font-style: normal; + font-weight: 400; +} + +.hero-body, +.hero-note, +.simple-app-copy, +.story-card p, +.result-card p, +.status-card p { + margin: 0; + color: var(--ow-muted); + line-height: 1.6; +} + +.hero-body { + max-width: 40rem; + font-size: 1.05rem; +} + +.hero-note { + font-size: 13px; +} + +.hero-proof-strip { + display: flex; + flex-wrap: wrap; + gap: 10px; +} + +.surface-chip { + display: inline-flex; + width: fit-content; + min-height: 34px; + align-items: center; + padding: 0 14px; + border-radius: 999px; + background: rgba(255, 255, 255, 0.84); + border: 1px solid rgba(255, 255, 255, 0.88); + box-shadow: 0 14px 34px -28px rgba(15, 23, 42, 0.18); + font-size: 12px; + font-weight: 600; + letter-spacing: 0.04em; + color: var(--ow-ink); +} + +.eyebrow { + display: inline-flex; + width: fit-content; + min-height: 34px; + align-items: center; + padding: 0 14px; + border-radius: 999px; + background: rgba(255, 255, 255, 0.9); + border: 1px solid rgba(255, 255, 255, 0.95); + box-shadow: 0 14px 34px -28px rgba(15, 23, 42, 0.28); + font-size: 12px; + font-weight: 700; + letter-spacing: 0.16em; + text-transform: uppercase; + color: var(--ow-muted); +} + +.hero-artifact, +.status-card { + position: relative; + overflow: hidden; + border-radius: 40px; + border: 1px solid var(--ow-border); + background: linear-gradient(180deg, rgba(255, 255, 255, 0.9), rgba(255, 255, 255, 0.72)); + box-shadow: var(--ow-shadow-strong); + backdrop-filter: blur(22px); + padding: clamp(24px, 4vw, 40px); +} + +.hero-artifact-share { + background: #ffffff; + padding: 18px; +} + +.hero-artifact-published { + padding: 0; + border: 0; + background: transparent; + box-shadow: none; 
+ backdrop-filter: none; + overflow: visible; +} + +.simple-app, +.app-window, +.artifact-window, +.surface-shell, +.surface-soft { + position: relative; + z-index: 1; + border-radius: 32px; + background: var(--ow-card-strong); + border: 1px solid rgba(255, 255, 255, 0.72); + box-shadow: var(--ow-shadow-soft); +} + +.surface-soft { + background: rgba(255, 255, 255, 0.92); +} + +.surface-shell { + background: linear-gradient(180deg, rgba(255, 255, 255, 0.96), rgba(255, 255, 255, 0.86)); +} + +.simple-app { + padding: 24px; +} + +.artifact-grid { + display: grid; + grid-template-columns: minmax(0, 1.12fr) minmax(280px, 0.88fr); + gap: 18px; + align-items: start; +} + +.share-cards-grid { + display: grid; + grid-template-columns: minmax(0, 1fr) minmax(320px, 1.1fr); + gap: 18px; + align-items: stretch; +} + +.share-home-stack { + display: grid; + gap: 18px; +} + +.share-bundle-stack { + display: grid; + gap: 18px; + margin-top: 26px; +} + +.share-bundle-simple-stack { + margin-top: 22px; +} + +.share-bundle-stack.has-sidebar { + grid-template-columns: minmax(260px, 320px) minmax(0, 1fr); + align-items: start; +} + +.share-bundle-toolbar { + margin-top: 4px; + padding: 18px 20px; +} + +.share-bundle-hero-card { + display: grid; + gap: 20px; + grid-template-columns: minmax(0, 1.2fr) minmax(260px, 0.8fr); + align-items: start; + padding: 22px; +} + +.share-bundle-hero-copy { + display: grid; + gap: 16px; + align-content: start; +} + +.share-bundle-summary, +.share-bundle-meta-card { + padding: 18px; +} + +.share-bundle-sidebar { + display: grid; + gap: 14px; + align-content: start; +} + +.share-bundle-metadata-list { + gap: 12px; +} + +.share-bundle-actions .button-primary, +.share-bundle-actions .button-secondary { + min-height: 64px; + padding: 0 30px; + font-size: 1.05rem; +} + +.share-bundle-editor-simple { + min-height: 660px; +} + +.share-bundle-grid { + align-items: stretch; +} + +.share-card { + backdrop-filter: blur(22px); + box-shadow: var(--ow-shadow); +} + 
+.share-actions-bar { + display: grid; + gap: 12px; + align-content: start; + padding: 20px 22px; + border-radius: 28px; + background: rgba(255, 255, 255, 0.92); + border: 1px solid rgba(255, 255, 255, 0.72); + box-shadow: var(--ow-shadow-soft); + backdrop-filter: blur(22px); +} + +.publish-action, +.publish-result { + display: grid; + gap: 10px; + min-height: 118px; + align-content: start; +} + +.publish-hint { + margin: 0; + display: flex; + align-items: center; + min-height: 52px; + font-size: 13px; + color: var(--ow-muted); + line-height: 1.5; + padding: 0 16px; +} + +.publish-button { + width: 100%; +} + +.package-card { + display: grid; + gap: 18px; + padding: 22px; + align-content: start; +} + +.share-home-card-header { + display: flex; + flex-wrap: wrap; + justify-content: space-between; + gap: 16px; + align-items: flex-start; +} + +.preview-panel { + grid-column: 2; + grid-row: 1; + display: flex; + flex-direction: column; + min-height: 550px; + overflow: hidden; + contain: size; +} + +.share-preview-panel { + min-height: 560px; +} + +.share-home-preview { + grid-column: auto; + grid-row: auto; + min-height: 620px; + contain: initial; +} + +.share-bundle-editor { + grid-column: auto; + grid-row: auto; + min-height: 620px; + contain: initial; +} + +.preview-surface { + border-radius: 24px; + background: rgba(255, 255, 255, 0.94); + border: 1px solid rgba(148, 163, 184, 0.18); + box-shadow: var(--ow-shadow-soft); + backdrop-filter: blur(22px); + display: flex; + flex-direction: column; + overflow: hidden; + flex: 1; +} + +.preview-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 18px 22px; + border-bottom: 1px solid rgba(148, 163, 184, 0.12); +} + +.preview-eyebrow { + font-size: 14px; + font-weight: 700; + letter-spacing: 0.16em; + text-transform: uppercase; + color: var(--ow-muted); +} + +.preview-filename { + display: inline-flex; + align-items: center; + gap: 6px; + font-size: 12px; + font-weight: 500; + 
color: #94a3b8; + font-family: var(--ow-mono); + letter-spacing: 0.02em; +} + +.preview-header-actions { + display: flex; + align-items: center; + gap: 8px; +} + +.share-frontmatter-preview { + display: grid; + gap: 4px; + padding: 16px 22px; + border-bottom: 1px solid rgba(148, 163, 184, 0.12); + background: rgba(248, 250, 252, 0.78); + font-size: 12px; + color: #64748b; +} + +.share-frontmatter-editor { + padding: 16px 22px; + border-bottom: 1px solid rgba(148, 163, 184, 0.12); + background: rgba(248, 250, 252, 0.78); +} + +.preview-editor-wrap { + display: grid; + grid-template-rows: 1fr; + flex: 1; + overflow-x: hidden; + overflow-y: auto; +} + +.preview-highlight, +.preview-editor { + grid-area: 1 / 1; + width: 100%; + min-width: 0; + padding: 20px 22px; + margin: 0; + border: none; + font-family: var(--ow-mono); + font-size: 13px; + line-height: 1.6; + tab-size: 2; + white-space: pre-wrap; + overflow-wrap: anywhere; + word-break: break-word; +} + +.share-preview-readonly { + min-height: 100%; +} + +.preview-highlight { + color: #334155; + background: transparent; + pointer-events: none; +} + +.preview-editor { + resize: none; + outline: none; + background: transparent; + color: transparent; + -webkit-text-fill-color: transparent; + caret-color: var(--ow-ink); + overflow: hidden; +} + +.preview-editor::placeholder { + color: rgba(148, 163, 184, 0.6); + -webkit-text-fill-color: rgba(148, 163, 184, 0.6); +} + +.preview-editor:focus { + outline: none; +} + +.preview-footer { + padding: 10px 22px; + font-size: 12px; + color: #94a3b8; + border-top: 1px solid rgba(148, 163, 184, 0.12); + font-family: var(--ow-mono); + text-align: right; +} + +/* Syntax highlighting — light-surface palette */ + +.hl-heading { + color: #0f172a; + font-weight: 700; +} + +.hl-punctuation { + color: #94a3b8; +} + +.hl-frontmatter { + color: #94a3b8; +} + +.hl-field { + color: #be123c; +} + +.hl-keyword { + color: #7c3aed; + font-weight: 600; +} + +.hl-type { + color: #0891b2; + 
font-style: italic; +} + +.hl-string { + color: #059669; +} + +.hl-number { + color: #d97706; +} + +.hl-bold { + color: #0f172a; + font-weight: 700; +} + +.hl-inline-code { + color: #0891b2; + background: rgba(8, 145, 178, 0.08); + border-radius: 3px; + padding: 0 3px; +} + +.hl-key { + color: #be123c; +} + +.hl-bracket { + color: #0f172a; + font-weight: 700; +} + +.hl-url { + color: #2563eb; + text-decoration: underline; + text-decoration-color: rgba(37, 99, 235, 0.35); +} + +.hl-comment { + color: #94a3b8; + font-style: italic; +} + + +.package-card-header, +.preview-panel-header { + display: flex; + flex-wrap: wrap; + justify-content: space-between; + gap: 12px; + align-items: flex-start; +} + +.selection-badge, +.preview-state { + display: inline-flex; + align-items: center; + min-height: 34px; + padding: 0 14px; + border-radius: 999px; + background: rgba(248, 250, 252, 0.9); + border: 1px solid rgba(148, 163, 184, 0.16); + color: var(--ow-muted); + font-size: 12px; + font-weight: 600; +} + +.preview-state.is-ready { + background: rgba(236, 253, 245, 0.95); + border-color: rgba(134, 239, 172, 0.4); + color: #166534; +} + +.selection-list { + display: grid; + gap: 8px; +} + +.inline-token { + display: inline; + padding: 0; + font-size: 0.88em; + font-weight: 600; + white-space: nowrap; + color: var(--ow-ink); + text-decoration: underline; + text-decoration-thickness: 2px; + text-underline-offset: 3px; +} + +.inline-token::before { + content: ""; + display: inline-block; + width: 7px; + height: 7px; + border-radius: 50%; + margin-right: 4px; + vertical-align: baseline; + position: relative; + top: -1px; +} + +.token-agent { + text-decoration-color: rgba(29, 78, 216, 0.4); +} +.token-agent::before { + background: var(--ow-agent); +} + +.token-skill { + text-decoration-color: rgba(249, 115, 22, 0.4); +} +.token-skill::before { + background: var(--ow-skill); +} + +.token-mcp { + text-decoration-color: rgba(15, 118, 110, 0.4); +} +.token-mcp::before { + background: 
var(--ow-mcp); +} + +.token-command { + text-decoration-color: rgba(124, 58, 237, 0.4); +} +.token-command::before { + background: var(--ow-command); +} + +.token-config { + text-decoration-color: rgba(71, 85, 105, 0.35); +} +.token-config::before { + background: #475569; +} + +.selection-item { + padding: 12px 14px; + border-radius: 18px; + background: rgba(248, 250, 252, 0.78); + border: 1px solid rgba(148, 163, 184, 0.14); +} + +.selection-item-name, +.summary-stat-value { + display: block; + font-weight: 600; + color: var(--ow-ink); +} + +.selection-item-path, +.summary-stat-label { + display: block; + margin-top: 4px; + font-size: 12px; + color: var(--ow-muted); +} + +.package-actions, +.link-stack { + display: grid; + gap: 12px; +} + +.preview-result-stack { + display: grid; + gap: 14px; +} + +.published-shell, +.published-layout { + display: grid; + gap: 20px; +} + +.published-shell { + width: 100%; +} + +.published-top-grid { + display: grid; + grid-template-columns: minmax(0, 1fr) minmax(0, 1fr); + gap: 16px; +} + +.published-card { + display: grid; + gap: 10px; + padding: 20px 22px; + border-radius: 24px; + background: rgba(255, 255, 255, 0.92); + border: 1px solid rgba(255, 255, 255, 0.72); + box-shadow: var(--ow-shadow-soft); + backdrop-filter: blur(22px); +} + +.published-card h3 { + margin: 0; + font-size: 20px; + font-weight: 600; + letter-spacing: -0.03em; + color: var(--ow-ink); +} + +.published-card-copy { + margin: 0; + color: var(--ow-muted); + line-height: 1.55; +} + +.share-link-bar { + padding: 16px 18px; + border-radius: 24px; + background: rgba(255, 255, 255, 0.92); + border: 1px solid rgba(255, 255, 255, 0.72); + box-shadow: var(--ow-shadow); + backdrop-filter: blur(22px); +} + +.share-link-bar-main { + display: grid; + gap: 14px; +} + +.share-link-row { + display: flex; + align-items: center; + min-height: 52px; + border-radius: 18px; + border: 1px solid rgba(148, 163, 184, 0.22); + background: rgba(255, 255, 255, 0.96); + transition: 
border-color 300ms var(--ow-ease), background-color 300ms var(--ow-ease); +} + +.share-link-row.is-copied { + border-color: rgba(34, 197, 94, 0.4); + background: rgba(236, 253, 245, 0.6); +} + +.share-link-inline { + flex: 1; + padding: 14px 16px; + border: none; + border-radius: 0; + background: transparent; + font-size: 13px; + word-break: break-word; +} + +.copy-icon-button { + display: flex; + align-items: center; + justify-content: center; + width: 40px; + height: 40px; + flex-shrink: 0; + margin-right: 8px; + border: none; + border-radius: 10px; + background: transparent; + color: var(--ow-muted); + cursor: pointer; + transition: color 300ms var(--ow-ease), background-color 300ms var(--ow-ease); +} + +.copy-icon-button:hover { + background: rgba(148, 163, 184, 0.1); + color: #3b82f6; +} + +.copy-icon-button.is-copied { + color: #22c55e; +} + +.copy-icon-button svg { + width: 16px; + height: 16px; +} + +.share-link-bar-actions { + display: flex; + flex-wrap: wrap; + align-items: center; + gap: 12px; +} + +.share-feedback-inline { + display: inline-flex; + align-items: center; + min-height: 36px; + padding: 0 12px; + border-radius: 999px; + background: rgba(248, 250, 252, 0.96); + border: 1px solid rgba(148, 163, 184, 0.18); + color: var(--ow-muted); + font-size: 12px; + font-weight: 600; +} + +.share-feedback-inline.is-success { + background: rgba(236, 253, 245, 0.95); + border-color: rgba(134, 239, 172, 0.4); + color: #166534; +} + +.input-method-grid { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 14px; + align-items: stretch; +} + +.share-upload-grid { + display: grid; + grid-template-columns: minmax(0, 1.1fr) minmax(280px, 0.9fr); + gap: 16px; + align-items: stretch; +} + +.share-upload-grid-compact { + grid-template-columns: minmax(180px, 220px) minmax(220px, 1fr) minmax(260px, 1.2fr) auto; + gap: 12px; + align-items: center; +} + +.share-upload-row { + display: grid; + gap: 12px; + align-items: center; +} + +.share-upload-row-metadata { + 
grid-template-columns: minmax(220px, 320px) minmax(320px, 1fr); +} + +.share-upload-row-actions { + grid-template-columns: 1fr 1fr; +} + +.share-upload-actions { + display: grid; + gap: 12px; + align-content: start; +} + +.share-metadata-grid { + display: grid; + gap: 10px; +} + +.share-metadata-field { + display: grid; + gap: 6px; +} + +.share-metadata-field-inline { + grid-template-columns: auto minmax(0, 1fr); + align-items: center; + gap: 10px; +} + +.share-metadata-field-description { + min-width: 0; +} + +.share-metadata-key { + font-family: var(--ow-mono); + font-size: 12px; + font-weight: 700; + color: var(--ow-muted); +} + +.share-metadata-input { + min-height: 44px; + padding: 0 14px; + border-radius: 14px; + border: 1px solid rgba(148, 163, 184, 0.18); + background: rgba(255, 255, 255, 0.96); + color: var(--ow-ink); + outline: none; +} + +.share-metadata-input:focus { + border-color: rgba(37, 99, 235, 0.45); + box-shadow: 0 0 0 4px rgba(37, 99, 235, 0.08); +} + +.share-home-generate-button { + width: 100%; + min-width: 0; +} + +.share-home-generate-button:disabled { + cursor: not-allowed; + pointer-events: none; + transform: none; + background: #ffffff; + border: 1px solid rgba(148, 163, 184, 0.4); + color: var(--ow-muted); + opacity: 1; + box-shadow: none; +} + +.share-home-generate-button:disabled:hover { + background: #ffffff; + transform: none; + box-shadow: none; +} + +.share-upload-supporting-row { + display: flex; + flex-wrap: wrap; + align-items: center; + gap: 10px 14px; +} + +.skill-editor-inline-actions { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: flex-end; + gap: 10px; +} + +.skill-save-button { + min-height: 38px; + padding: 0 18px; +} + +.save-status { + font-size: 12px; + color: var(--ow-muted); +} + +.save-status.is-error { + color: #b91c1c; +} + +.share-feedback, +.warning-card { + display: grid; + gap: 8px; + padding: 16px 18px; + border-radius: 24px; + background: rgba(255, 255, 255, 0.9); + border: 
1px solid rgba(255, 255, 255, 0.72); + box-shadow: var(--ow-shadow-soft); + backdrop-filter: blur(22px); +} + +.share-feedback strong, +.warning-card h4 { + color: var(--ow-ink); +} + +.share-feedback strong, +.warning-card h4, +.simple-app-title, +.result-card h3 { + margin: 0; + font-size: 20px; + font-weight: 600; + letter-spacing: -0.03em; +} + +.simple-app-title { + margin-bottom: 0; +} + +.share-feedback span, +.warning-card p { + margin: 0; + color: var(--ow-muted); + line-height: 1.6; +} + +.share-feedback.is-success { + background: rgba(236, 253, 245, 0.95); + border-color: rgba(134, 239, 172, 0.4); +} + +.share-feedback-compact { + gap: 4px; + padding: 14px 16px; +} + +.package-status { + display: flex; + align-items: center; + gap: 8px; + padding: 10px 14px; + border-radius: 12px; + font-size: 13px; + font-weight: 500; +} + +.package-status-dot { + width: 8px; + height: 8px; + border-radius: 50%; + flex-shrink: 0; +} + +.severity-neutral, +.severity-success, +.severity-info, +.severity-warn { + background: rgba(148, 163, 184, 0.06); + color: var(--ow-muted); +} + +.severity-neutral .package-status-dot { + background: rgba(148, 163, 184, 0.35); +} + +.severity-success .package-status-dot { + background: var(--ow-mcp); +} + +.severity-info .package-status-dot { + background: var(--ow-skill); +} + +.severity-warn .package-status-dot { + background: #ef4444; +} + +.package-status-items { + margin: 0; + padding: 0 0 0 16px; + font-size: 12px; + color: var(--ow-muted); + display: grid; + gap: 2px; +} + +.summary-grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 10px; +} + +.summary-stat { + padding: 14px 16px; + border-radius: 18px; + background: rgba(248, 250, 252, 0.9); + border: 1px solid rgba(148, 163, 184, 0.14); +} + +.preview-note { + margin: 0; + font-size: 13px; + line-height: 1.6; + color: var(--ow-muted); +} + +.drop-zone { + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + 
gap: 14px; + min-height: 294px; + border: 2px dashed rgba(148, 163, 184, 0.3); + border-radius: 20px; + padding: 32px 24px; + text-align: center; + background: linear-gradient(180deg, rgba(248, 250, 252, 0.6), rgba(241, 245, 249, 0.4)); + cursor: pointer; +} + +.share-skill-drop-zone { + min-height: 240px; +} + +.share-skill-drop-zone-compact { + flex-direction: row; + justify-content: flex-start; + min-height: 56px; + padding: 10px 14px; + gap: 12px; + border-radius: 16px; + text-align: left; +} + +.drop-zone:hover, +.drop-zone.is-dragover { + border-color: rgba(37, 99, 235, 0.45); + background: linear-gradient(180deg, rgba(239, 246, 255, 0.9), rgba(219, 234, 254, 0.5)); +} + +.drop-zone.is-dragover { + transform: scale(1.01); +} + +.drop-zone[aria-busy="true"] { + cursor: progress; + opacity: 0.6; + pointer-events: none; +} + +.clipboard-egg-button { + display: flex; + align-items: center; + justify-content: center; + width: 36px; + height: 36px; + flex-shrink: 0; + border: none; + border-radius: 10px; + background: transparent; + color: #64748b; + cursor: pointer; + transition: background-color 300ms var(--ow-ease), color 300ms var(--ow-ease); +} + +.clipboard-egg-button:hover { + background: rgba(100, 116, 139, 0.08); +} + +.clipboard-egg-inline { + width: 20px; + height: 20px; + padding: 0; + border-radius: 6px; + background: transparent; + color: currentColor; +} + +.clipboard-egg-inline:hover { + background: rgba(100, 116, 139, 0.08); +} + +.preview-copy-button { + color: currentColor; +} + +.preview-copy-button:hover { + color: currentColor; +} + +.clipboard-egg-button svg { + width: 16px; + height: 16px; +} + +.clipboard-egg-inline svg { + width: 12px; + height: 12px; +} + +.drop-icon { + display: flex; + width: 48px; + height: 48px; + align-items: center; + justify-content: center; + border-radius: 14px; + background: rgba(255, 255, 255, 0.9); + color: rgba(100, 116, 139, 0.7); + box-shadow: 0 1px 3px rgba(15, 23, 42, 0.06); +} + +.drop-zone:hover 
.drop-icon, +.drop-zone.is-dragover .drop-icon { + color: #3b82f6; + background: #ffffff; + box-shadow: 0 4px 12px -4px rgba(59, 130, 246, 0.25); +} + +.drop-icon svg { + width: 22px; + height: 22px; +} + +.drop-text { + display: flex; + flex-direction: column; + gap: 4px; +} + +.drop-heading { + margin: 0; + font-size: 14px; + font-weight: 600; + color: var(--ow-ink); +} + +.drop-hint { + margin: 0; + font-size: 13px; + color: var(--ow-muted); +} + +.share-upload-note, +.share-inline-status { + margin: 0; + font-size: 13px; + line-height: 1.55; + color: var(--ow-muted); +} + +.drop-browse { + color: #3b82f6; + font-weight: 500; + text-decoration: underline; + text-decoration-color: rgba(59, 130, 246, 0.3); + text-underline-offset: 2px; +} + +.drop-zone:hover .drop-browse { + text-decoration-color: rgba(59, 130, 246, 0.6); +} + +.share-skill-drop-zone-compact .drop-icon { + width: 36px; + height: 36px; + border-radius: 10px; +} + +.share-skill-drop-zone-compact .drop-icon svg { + width: 18px; + height: 18px; +} + +.share-skill-drop-zone-compact .drop-text { + gap: 2px; +} + +.share-skill-drop-zone-compact .drop-heading { + font-size: 13px; +} + +.share-skill-drop-zone-compact .drop-hint { + font-size: 12px; +} + +.included-section h4 { + margin: 0; +} + +.paste-meta, +.status-area { + margin: 0; + font-size: 13px; + color: var(--ow-muted); +} + +.included-section { + text-align: left; + min-width: 0; +} + +.included-section-header { + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; + margin-bottom: 12px; +} + +.included-section h4 { + margin: 0; + font-size: 12px; + font-weight: 700; + letter-spacing: 0.12em; + text-transform: uppercase; + color: var(--ow-muted); +} + +.included-list { + display: grid; + grid-template-columns: 1fr; + gap: 10px; +} + +.metadata-list, +.warnings-list, +.url-stack { + display: grid; + gap: 10px; +} + +.share-story-grid { + display: grid; + grid-template-columns: repeat(3, minmax(0, 1fr)); + gap: 
18px; + margin-top: 18px; +} + +.bundle-compact-strip { + align-self: start; + padding: 16px; + border-radius: 24px; +} + +.bundle-strip-header { + margin-bottom: 10px; + font-family: var(--ow-mono); + font-size: 12px; + font-weight: 700; + color: var(--ow-muted); +} + +.bundle-strip-list { + display: grid; + gap: 8px; + align-items: stretch; +} + +.bundle-strip-chip { + display: inline-flex; + align-items: center; + gap: 8px; + min-height: 40px; + padding: 0 14px; + border-radius: 999px; + background: rgba(248, 250, 252, 0.96); + border: 1px solid rgba(148, 163, 184, 0.16); + font-family: var(--ow-mono); + font-size: 12px; + color: var(--ow-ink); + cursor: pointer; + width: 100%; + justify-content: flex-start; + transition: border-color 300ms var(--ow-ease), background-color 300ms var(--ow-ease), transform 300ms var(--ow-ease); +} + +button.bundle-strip-chip { + appearance: none; +} + +.bundle-strip-chip:hover:not(:disabled) { + border-color: rgba(37, 99, 235, 0.32); + background: rgba(239, 246, 255, 0.96); + transform: translateY(-1px); +} + +.bundle-strip-chip.is-active, +.bundle-strip-chip:disabled { + background: rgba(225, 239, 255, 0.96); + border-color: rgba(37, 99, 235, 0.42); + cursor: default; +} + +.step-list { + display: grid; + gap: 12px; +} + +.step-row { + display: grid; + grid-template-columns: auto minmax(0, 1fr); + gap: 12px; + align-items: start; + color: var(--ow-ink); + line-height: 1.6; +} + +.step-bullet { + display: inline-flex; + min-width: 42px; + min-height: 42px; + align-items: center; + justify-content: center; + padding: 0 10px; + border-radius: 999px; + background: rgba(248, 250, 252, 0.92); + border: 1px solid rgba(148, 163, 184, 0.18); + color: var(--ow-muted); + font-size: 12px; + font-weight: 700; + letter-spacing: 0.08em; +} + +button.included-item { + font: inherit; + color: inherit; + text-align: left; + cursor: pointer; +} + +.included-item { + display: flex; + align-items: flex-start; + gap: 10px; + padding: 12px; + 
border-radius: 16px; + background: rgba(248, 250, 252, 0.92); + border: 1px solid rgba(148, 163, 184, 0.14); +} + +.included-item:hover:not(:disabled) { + border-color: rgba(37, 99, 235, 0.35); + background: rgba(240, 247, 255, 0.95); + transform: translateY(-1px); +} + +.included-item.is-active { + border-color: rgba(37, 99, 235, 0.5); + background: rgba(240, 247, 255, 0.98); + box-shadow: 0 0 0 2px rgba(37, 99, 235, 0.15); +} + +.included-item.is-dimmed { + opacity: 0.4; +} + +.included-item.is-dimmed:hover { + opacity: 0.75; +} + +.included-item:disabled { + cursor: default; +} + +.preview-filename-dot { + display: inline-block; + width: 8px; + height: 8px; + border-radius: 50%; + margin-right: 6px; + vertical-align: middle; + background: rgba(148, 163, 184, 0.38); + box-shadow: inset 0 0 0 1px rgba(148, 163, 184, 0.32); +} + +.item-left { + display: flex; + align-items: flex-start; + gap: 10px; +} + +.item-dot { + position: relative; + width: 28px; + height: 28px; + border-radius: 999px; + flex-shrink: 0; + overflow: hidden; +} + +.item-dot::after { + content: ""; + position: absolute; + inset: 0; + border-radius: inherit; + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='200' height='200'%3E%3Cfilter id='g'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.75' numOctaves='4' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23g)' opacity='0.45'/%3E%3C/svg%3E"); + background-size: cover; + mix-blend-mode: overlay; +} + +.item-text { + display: flex; + flex-direction: column; + gap: 2px; + min-width: 0; +} + +.dot-agent { + background: var(--ow-agent); +} + +.dot-skill { + background: var(--ow-skill); +} + +.dot-mcp { + background: var(--ow-mcp); +} + +.dot-command { + background: var(--ow-command); +} + +.dot-config { + background: #475569; +} + +.dot-pending { + background: rgba(148, 163, 184, 0.38); + box-shadow: inset 0 0 0 1px rgba(148, 163, 184, 0.32); +} + +.item-title 
{ + font-size: 13px; + font-weight: 600; + line-height: 1.3; +} + +.item-meta { + font-size: 11px; + color: var(--ow-muted); +} + +.url-box { + width: 100%; + border-radius: 18px; + border: 1px solid rgba(148, 163, 184, 0.28); + background: rgba(255, 255, 255, 0.96); + color: var(--ow-ink); +} + +.paste-meta { + display: flex; + flex-wrap: wrap; + justify-content: flex-end; + gap: 12px; +} + +.status-area { + text-align: center; +} + +.app-window, +.artifact-window { + overflow: hidden; +} + +.app-window-header, +.artifact-window-header { + position: relative; + display: flex; + align-items: center; + justify-content: center; + padding: 12px 16px; + background: linear-gradient(to bottom, rgba(255, 255, 255, 0.9), rgba(255, 255, 255, 0.62)); + border-bottom: 1px solid rgba(255, 255, 255, 0.5); +} + +.mac-dots { + position: absolute; + left: 16px; + display: flex; + gap: 6px; +} + +.mac-dot { + width: 12px; + height: 12px; + border-radius: 50%; +} + +.mac-dot.red { + background: #ff5f56; + border: 1px solid rgba(224, 68, 62, 0.2); +} + +.mac-dot.yellow { + background: #ffbd2e; + border: 1px solid rgba(222, 161, 35, 0.2); +} + +.mac-dot.green { + background: #27c93f; + border: 1px solid rgba(26, 171, 41, 0.2); +} + +.app-window-title, +.artifact-window-title { + font-size: 12px; + font-weight: 500; + color: var(--ow-muted); +} + +.app-window-body, +.artifact-window-body { + padding: 24px; + background: #ffffff; +} + +.url-box { + padding: 16px; + font-size: 13px; + word-break: break-word; +} + +.metadata-row { + display: flex; + justify-content: space-between; + align-items: baseline; + gap: 12px; + font-size: 13px; +} + +.metadata-row dt, +.warnings-empty { + color: var(--ow-muted); +} + +.metadata-row dd { + margin: 0; + font-weight: 600; +} + +.warnings-list { + margin: 0; + padding-left: 20px; + color: #b91c1c; + line-height: 1.6; +} + + +.status-card { + max-width: 720px; + margin: 48px auto 0; +} + +.visually-hidden { + position: absolute; + width: 1px; + 
height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border: 0; +} + +.button-primary:focus-visible, +.button-secondary:focus-visible, +.drop-zone:focus-visible, +.included-item:focus-visible, +.preview-editor:focus-visible { + border-color: rgba(27, 41, 255, 0.55); + box-shadow: 0 0 0 0.22rem rgba(27, 41, 255, 0.16); + outline: none; +} + +@media (prefers-reduced-motion: reduce) { + *, + *::before, + *::after { + animation-duration: 0.01ms !important; + animation-iteration-count: 1 !important; + transition-duration: 0.01ms !important; + scroll-behavior: auto !important; + } +} + +@media (max-width: 960px) { + .nav, + .hero-layout, + .artifact-grid, + .share-cards-grid, + .share-story-grid { + grid-template-columns: 1fr; + } + + .share-card-full, + .preview-panel { + grid-column: 1; + grid-row: auto; + } + + .preview-panel { + min-height: 300px; + max-height: 50vh; + } + + .input-method-grid { + grid-template-columns: 1fr; + } + + .share-upload-grid { + grid-template-columns: 1fr; + } + + .share-upload-grid-compact { + grid-template-columns: 1fr 1fr; + align-items: stretch; + } + + .share-upload-row-metadata { + grid-template-columns: 1fr; + } + + .share-upload-row-actions { + grid-template-columns: 1fr 1fr; + } + + .share-home-generate-button { + width: 100%; + } + + .included-list { + grid-template-columns: 1fr; + } + + .published-top-grid { + grid-template-columns: 1fr; + } + + .nav { + flex-direction: column; + align-items: stretch; + } + + .nav-links, + .nav-actions { + justify-content: center; + } + + .hero-layout, + .artifact-grid, + .share-cards-grid { + display: grid; + } +} + +@media (max-width: 720px) { + .share-upload-grid-compact { + grid-template-columns: 1fr; + } + + .share-bundle-hero-card { + grid-template-columns: 1fr; + } + + .share-bundle-stack.has-sidebar { + grid-template-columns: 1fr; + } + + .share-metadata-field-inline { + grid-template-columns: 1fr; + gap: 6px; + } + + 
.share-upload-row-actions { + grid-template-columns: 1fr; + } + + .share-skill-drop-zone-compact { + justify-content: center; + text-align: center; + } + + .summary-grid { + grid-template-columns: 1fr; + } +} + +@media (max-width: 720px) { + .shell { + padding: 20px 14px 40px; + } + + .hero-copy h1, + .status-card h1 { + font-size: clamp(2.4rem, 14vw, 3.5rem); + } + + .simple-app, + .result-card, + .status-card, + .hero-artifact, + .share-card, + .share-actions-bar { + padding: 20px; + border-radius: 24px; + } + + .button-row, + .hero-actions, + .nav-links, + .nav-actions { + flex-direction: column; + align-items: stretch; + } + + .button-primary, + .button-secondary, + .nav-links a { + width: 100%; + } + + .share-link-bar-actions { + flex-direction: column; + align-items: stretch; + } + + .share-feedback-compact { + gap: 8px; + } + + .drop-zone { + min-height: 100px; + } +} diff --git a/apps/share/test-results/.last-run.json b/apps/share/test-results/.last-run.json new file mode 100644 index 0000000000..cbcc1fbac1 --- /dev/null +++ b/apps/share/test-results/.last-run.json @@ -0,0 +1,4 @@ +{ + "status": "passed", + "failedTests": [] +} \ No newline at end of file diff --git a/apps/share/tsconfig.json b/apps/share/tsconfig.json new file mode 100644 index 0000000000..bcfbb589eb --- /dev/null +++ b/apps/share/tsconfig.json @@ -0,0 +1,38 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "lib": [ + "ES2022", + "DOM", + "DOM.Iterable" + ], + "jsx": "react-jsx", + "strict": true, + "noEmit": true, + "allowImportingTsExtensions": true, + "esModuleInterop": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "isolatedModules": true, + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "allowJs": true + }, + "include": [ + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": [ + "node_modules", + ".next", + "__tests__" + ] +} diff --git 
a/apps/share/vercel.json b/apps/share/vercel.json new file mode 100644 index 0000000000..a667db8cda --- /dev/null +++ b/apps/share/vercel.json @@ -0,0 +1,4 @@ +{ + "$schema": "https://openapi.vercel.sh/vercel.json", + "framework": "nextjs" +} diff --git a/apps/story-book/dev.log b/apps/story-book/dev.log new file mode 100644 index 0000000000..adaf41d974 --- /dev/null +++ b/apps/story-book/dev.log @@ -0,0 +1,9 @@ + +> @openwork/story-book@0.0.0 dev /Users/benjaminshafii/openwork-enterprise/_repos/openwork/apps/story-book +> vite + + + VITE v6.4.1 ready in 389 ms + + ➜ Local: http://localhost:5176/ + ➜ Network: use --host to expose diff --git a/apps/story-book/index.html b/apps/story-book/index.html new file mode 100644 index 0000000000..edcf036cf9 --- /dev/null +++ b/apps/story-book/index.html @@ -0,0 +1,31 @@ + + + + + + OpenWork Story Book + + + + + + + + +
+ + + diff --git a/apps/story-book/package.json b/apps/story-book/package.json new file mode 100644 index 0000000000..6691e608b6 --- /dev/null +++ b/apps/story-book/package.json @@ -0,0 +1,38 @@ +{ + "name": "@openwork/story-book", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview", + "typecheck": "tsc -p tsconfig.json --noEmit" + }, + "devDependencies": { + "@tailwindcss/vite": "^4.1.18", + "tailwindcss": "^4.1.18", + "typescript": "^5.6.3", + "vite": "^6.0.1", + "vite-plugin-solid": "^2.11.0" + }, + "packageManager": "pnpm@10.27.0", + "dependencies": { + "@codemirror/commands": "^6.8.0", + "@codemirror/lang-markdown": "^6.3.3", + "@codemirror/language": "^6.11.0", + "@codemirror/state": "^6.5.2", + "@codemirror/view": "^6.38.0", + "@opencode-ai/sdk": "^1.4.9", + "@radix-ui/colors": "^3.0.0", + "@solid-primitives/event-bus": "^1.1.2", + "@solid-primitives/storage": "^4.3.3", + "@solidjs/router": "^0.15.4", + "@tanstack/solid-virtual": "^3.13.19", + "fuzzysort": "^3.1.0", + "jsonc-parser": "^3.2.1", + "lucide-solid": "^0.562.0", + "marked": "^17.0.1", + "solid-js": "^1.9.0" + } +} diff --git a/apps/story-book/src/index.tsx b/apps/story-book/src/index.tsx new file mode 100644 index 0000000000..7548a455b6 --- /dev/null +++ b/apps/story-book/src/index.tsx @@ -0,0 +1,65 @@ +/* @refresh reload */ +import { render } from "solid-js/web"; + +import "../../app/src/app/index.css"; +import { openDesktopUrl, relaunchDesktopApp } from "../../app/src/app/lib/desktop"; +import { ConnectionsProvider } from "../../app/src/app/connections/provider"; +import { PlatformProvider, type Platform } from "../../app/src/app/context/platform"; +import { bootstrapTheme } from "../../app/src/app/theme"; +import { isDesktopRuntime } from "../../app/src/app/utils"; +import { initLocale } from "../../app/src/i18n"; +import NewLayoutApp from "./new-layout"; + +bootstrapTheme(); +initLocale(); + +const 
root = document.getElementById("root"); + +if (!root) { + throw new Error("Root element not found"); +} + +const platform: Platform = { + platform: isDesktopRuntime() ? "desktop" : "web", + openLink(url: string) { + if (isDesktopRuntime()) { + void openDesktopUrl(url).catch(() => undefined); + return; + } + window.open(url, "_blank"); + }, + restart: async () => { + if (isDesktopRuntime()) { + await relaunchDesktopApp(); + return; + } + window.location.reload(); + }, + notify: async () => undefined, + storage: (name) => { + const prefix = name ? `${name}:` : ""; + return { + getItem: (key) => window.localStorage.getItem(prefix + key), + setItem: (key, value) => window.localStorage.setItem(prefix + key, value), + removeItem: (key) => window.localStorage.removeItem(prefix + key), + }; + }, + fetch, +}; + +const storyConnectionsStore = { + mcpServers: () => [], + mcpStatuses: () => ({}), + mcpStatus: () => null, +} as any; + +render( + () => ( + + + + + + ), + root, +); diff --git a/apps/story-book/src/mock-data.ts b/apps/story-book/src/mock-data.ts new file mode 100644 index 0000000000..654689bf55 --- /dev/null +++ b/apps/story-book/src/mock-data.ts @@ -0,0 +1,285 @@ +import type { Part } from "@opencode-ai/sdk/v2/client"; + +import type { MessageWithParts } from "../../app/src/app/types"; +import type { WorkspaceInfo } from "../../app/src/app/lib/desktop"; + +export type StoryScreen = "session" | "settings" | "components" | "onboarding"; + +export type StoryStep = { + label: string; + detail: string; + state: "done" | "active" | "queued"; +}; + +export const storyWorkspaces: WorkspaceInfo[] = [ + { + id: "local-foundation", + name: "Local Foundation", + displayName: "OpenWork App", + path: "~/OpenWork/app", + preset: "starter", + workspaceType: "local", + }, + { + id: "remote-worker", + name: "Remote Worker", + displayName: "Ops Worker", + path: "remote://ops-worker", + preset: "automation", + workspaceType: "remote", + remoteType: "openwork", + baseUrl: 
"https://worker.openworklabs.com/opencode", + openworkHostUrl: "https://worker.openworklabs.com", + openworkWorkspaceName: "Ops Worker", + sandboxBackend: "docker", + sandboxContainerName: "openwork-ops-worker", + }, +]; + +export const sessionList = [ + { title: "Refresh den cloud worker states", meta: "6m ago", active: true }, + { title: "Polish mobile workspace connect flow", meta: "31m ago", active: false }, + { title: "Audit release screenshots", meta: "Yesterday", active: false }, + { title: "Tighten status copy in settings", meta: "Yesterday", active: false }, +]; + +export const progressItems = [ + { label: "Connect provider", done: true }, + { label: "Review shell layout", done: true }, + { label: "Mock key session states", done: false }, + { label: "Capture PR screenshots", done: false }, +]; + +const baseTime = Date.now() - 12 * 60 * 1000; + +function messageInfo( + id: string, + role: "user" | "assistant", + createdOffsetMs: number, +): MessageWithParts["info"] { + return { + id, + sessionID: "story-shell-session", + role, + time: { + created: baseTime + createdOffsetMs, + ...(role === "assistant" + ? { completed: baseTime + createdOffsetMs + 20_000 } + : {}), + }, + } as MessageWithParts["info"]; +} + +function toolPart( + tool: string, + status: "completed" | "running" | "pending" | "error", + input: Record, + extras: Record = {}, +): Part { + return { + type: "tool", + tool, + state: { + status, + input, + ...extras, + }, + } as Part; +} + +export const sessionMessages: MessageWithParts[] = [ + { + info: messageInfo("sb-msg-1", "user", 0), + parts: [ + { + type: "text", + text: + "Build a faithful story-book for the OpenWork app so we can iterate on the shell, session timeline, settings cards, and onboarding without touching the live runtime. 
Also make the mocked transcript feel closer to a real OpenWork session, including tool activity.", + } as Part, + ], + }, + { + info: messageInfo("sb-msg-2", "assistant", 25_000), + parts: [ + { + type: "text", + text: + "I audited the live session surface first so the mock keeps the same shell proportions, transcript rhythm, and action rail behavior.", + } as Part, + toolPart( + "read", + "completed", + { + filePath: "apps/app/src/app/pages/session.tsx", + offset: 4246, + limit: 220, + }, + { + output: "Reviewed the live session header, command strip, and transcript layout bindings.", + }, + ), + toolPart( + "grep", + "completed", + { + pattern: "tool|Command\\+K|compactSessionHistory|MessageList", + path: "apps/app/src/app", + }, + { + output: "Found the real message timeline and tool summary helpers in message-list.tsx and utils/index.ts.", + }, + ), + { + type: "reasoning", + text: + "Thinking: the mock should not invent a separate timeline widget. The fastest way to reach parity is to feed richer fake parts into the exact same MessageList surface the app already uses.", + } as Part, + { + type: "text", + text: + "That keeps the story-book useful for UI decisions while avoiding a parallel rendering path that could drift from the real app.", + } as Part, + ], + }, + { + info: messageInfo("sb-msg-3", "assistant", 70_000), + parts: [ + { + type: "text", + text: + "I then mocked a more realistic execution pass so the story transcript shows the same kinds of steps users see in production.", + } as Part, + toolPart( + "apply_patch", + "completed", + { + filePath: "apps/story-book/src/story-book.tsx", + }, + { + output: "Success. 
Updated the following files: M apps/story-book/src/story-book.tsx", + }, + ), + toolPart( + "bash", + "completed", + { + command: "pnpm --filter story-book build", + description: "Build story-book app to verify compile", + }, + { + output: + "vite v6.4.1 building for production...\n✓ 1966 modules transformed.\n✓ built in 2.95s", + }, + ), + toolPart( + "task", + "completed", + { + description: "Review session tool-call fidelity", + subagent_type: "explore", + }, + { + metadata: { + sessionId: "story-subagent-session", + }, + output: + "Subagent reviewed the session transcript surface and recommended using the live MessageList grouping semantics.", + }, + ), + { + type: "text", + text: + "The result is still mocked data, but the transcript now exercises the real tool-call affordances: exploration summaries, individual action rows, and post-action assistant copy.", + } as Part, + ], + }, + { + info: messageInfo("sb-msg-4", "assistant", 105_000), + parts: [ + { + type: "text", + text: + "Next pass, we can add a few alternate transcript scenarios here too: running tools, failed commands, and a nested subagent thread with its own mini timeline.", + } as Part, + ], + }, +]; + +export const settingsTabs = ["General", "Cloud", "Model", "Advanced", "Debug"] as const; + +export const settingsCards = [ + { + title: "Runtime", + eyebrow: "Core services", + body: "Status for your local engine and OpenWork server with versioning, connection health, and repair actions.", + points: [ + "OpenCode engine ready on localhost:4096", + "OpenWork server proxied for remote workers", + "Developer mode enabled for design QA", + ], + action: "Reconnect runtime", + }, + { + title: "Providers", + eyebrow: "Models + auth", + body: "Compact surface for provider connection state, default model choice, and reasoning depth defaults.", + points: [ + "Anthropic connected", + "OpenAI connected", + "Default model: Claude Sonnet 4", + ], + action: "Manage providers", + }, + { + title: "Remote worker", + 
eyebrow: "Cloud worker", + body: "Connection card for hosted workspaces with URL, token state, and reconnect controls.", + points: [ + "Worker URL copied into the shell", + "Last heartbeat 18s ago", + "Sandbox container detected", + ], + action: "Refresh worker", + }, + { + title: "Updates", + eyebrow: "Desktop", + body: "Patch notes and delivery state for the desktop app, orchestrator, and router sidecars.", + points: [ + "Auto-check weekly", + "Download on Wi-Fi only", + "Restart banner prepared", + ], + action: "Check for updates", + }, +]; + +export const onboardingChoices = [ + { + title: "Create local workspace", + detail: "Spin up a local OpenWork folder with reusable skills and project memory.", + }, + { + title: "Connect remote worker", + detail: "Attach to a hosted worker using OpenWork URL + token for shared remote execution.", + }, +]; + +export const screenCopy: Record = { + session: { + title: "Session shell", + detail: "The full operational canvas: left rail, timeline, composer, utility rail, and status bar.", + }, + settings: { + title: "Settings stack", + detail: "Dense control cards for runtime health, providers, remote workers, and update handling.", + }, + components: { + title: "Core components", + detail: "Buttons, inputs, chips, cards, status rail, and other primitives pulled from the live app language.", + }, + onboarding: { + title: "Onboarding", + detail: "First-run surfaces for theme choice, workspace creation, and remote worker connection.", + }, +}; diff --git a/apps/story-book/src/new-layout.tsx b/apps/story-book/src/new-layout.tsx new file mode 100644 index 0000000000..c3d6785bbf --- /dev/null +++ b/apps/story-book/src/new-layout.tsx @@ -0,0 +1,1758 @@ +import { For, Show, createEffect, createMemo, createSignal, onCleanup } from "solid-js"; +import { Redo2, Search, Undo2, X } from "lucide-solid"; + +import Button from "../../app/src/app/components/button"; +import DenSettingsPanel from 
"../../app/src/app/components/den-settings-panel"; +import ModelPickerModal from "../../app/src/app/components/model-picker-modal"; +import StatusBar from "../../app/src/app/components/status-bar"; +import Composer from "../../app/src/app/components/session/composer"; +import MessageList from "../../app/src/app/components/session/message-list"; +import WorkspaceSessionList from "../../app/src/app/components/session/workspace-session-list"; +import { + CreateWorkspaceModal, + ShareWorkspaceModal, +} from "../../app/src/app/workspace"; +import { MCP_QUICK_CONNECT, SUGGESTED_PLUGINS } from "../../app/src/app/constants"; +import { createWorkspaceShellLayout } from "../../app/src/app/lib/workspace-shell-layout"; +import { getModelBehaviorSummary, sanitizeModelBehaviorValue } from "../../app/src/app/lib/model-behavior"; +import type { OpenworkServerClient } from "../../app/src/app/lib/openwork-server"; +import ExtensionsView from "../../app/src/app/pages/extensions"; +import IdentitiesView from "../../app/src/app/pages/identities"; +import { + applyThemeMode, + getInitialThemeMode, + persistThemeMode, + subscribeToSystemTheme, + type ThemeMode, +} from "../../app/src/app/theme"; +import type { + ComposerDraft, + HubSkillCard, + HubSkillRepo, + McpServerEntry, + McpStatusMap, + MessageWithParts, + ModelOption, + ModelRef, + PluginScope, + ProviderListItem, + SlashCommandOption, + SkillCard, + WorkspaceConnectionState, + WorkspacePreset, + WorkspaceSessionGroup, +} from "../../app/src/app/types"; +import { sessionMessages, storyWorkspaces } from "./mock-data"; + +type CommandPaletteMode = "root" | "sessions"; +type SettingsTab = "general" | "den" | "model" | "skills" | "extensions" | "messaging" | "advanced" | "appearance" | "updates" | "recovery" | "debug"; + +type CommandPaletteItem = { + id: string; + title: string; + detail?: string; + meta?: string; + action: () => void; +}; + +const localWorkspace = storyWorkspaces[0] ?? 
{ + id: "local-foundation", + name: "Local Foundation", + displayName: "OpenWork App", + path: "~/OpenWork/app", + preset: "starter", + workspaceType: "local" as const, +}; + +const remoteWorkspace = storyWorkspaces[1] ?? { + id: "remote-worker", + name: "Remote Worker", + displayName: "Ops Worker", + path: "remote://ops-worker", + preset: "automation", + workspaceType: "remote" as const, + remoteType: "openwork" as const, + baseUrl: "https://worker.openworklabs.com/opencode", + openworkHostUrl: "https://worker.openworklabs.com", + openworkWorkspaceName: "Ops Worker", + sandboxBackend: "docker" as const, + sandboxContainerName: "openwork-ops-worker", +}; + +const now = Date.now(); + +const workspaceSessionGroups: WorkspaceSessionGroup[] = [ + { + workspace: localWorkspace, + status: "ready", + sessions: [ + { + id: "sb-session-shell", + title: "Story shell parity with session.tsx", + slug: "story-shell-parity", + time: { updated: now - 2 * 60 * 1000, created: now - 22 * 60 * 1000 }, + }, + { + id: "sb-session-provider", + title: "Provider states and status rail", + slug: "provider-states", + time: { updated: now - 18 * 60 * 1000, created: now - 56 * 60 * 1000 }, + }, + { + id: "sb-session-mobile", + title: "Mobile shell spacing pass", + slug: "mobile-shell-pass", + time: { updated: now - 56 * 60 * 1000, created: now - 3 * 60 * 60 * 1000 }, + }, + ], + }, + { + workspace: remoteWorkspace, + status: "ready", + sessions: [ + { + id: "sb-session-remote", + title: "Remote worker onboarding", + slug: "remote-worker-onboarding", + time: { updated: now - 7 * 60 * 1000, created: now - 2 * 60 * 60 * 1000 }, + }, + { + id: "sb-session-inbox", + title: "Inbox upload behavior", + slug: "inbox-upload", + time: { updated: now - 35 * 60 * 1000, created: now - 6 * 60 * 60 * 1000 }, + }, + ], + }, +]; + +const workspaceConnectionStateById: Record = { + [localWorkspace.id]: { status: "connected", message: "Local engine ready" }, + [remoteWorkspace.id]: { status: "connected", message: 
"Connected via token" }, +}; + +const sessionStatusById: Record = { + "sb-session-shell": "running", + "sb-session-provider": "idle", + "sb-session-mobile": "idle", + "sb-session-remote": "idle", + "sb-session-inbox": "idle", +}; + +const mcpStatuses: McpStatusMap = { + "chrome-devtools": { status: "connected" }, + notion: { status: "connected" }, + linear: { status: "needs_auth" }, +}; + +const workingFiles = [ + "apps/story-book/src/story-book.tsx", + "apps/app/src/app/pages/session.tsx", + "apps/app/src/app/components/session/workspace-session-list.tsx", + "apps/app/src/app/components/session/inbox-panel.tsx", +]; + +const commandOptions: SlashCommandOption[] = [ + { id: "design-review", name: "design-review", description: "Open a design review pass", source: "command" }, + { id: "test-flow", name: "test-flow", description: "Run shell flow checks", source: "skill" }, +]; + +const storyModels: Array<{ + ref: ModelRef; + title: string; + description: string; + isConnected: boolean; + model: ProviderListItem["models"][string]; +}> = [ + { + ref: { providerID: "anthropic", modelID: "claude-sonnet-4-5-20250929" }, + title: "Claude Sonnet 4.5", + description: "Anthropic", + isConnected: true, + model: { + id: "claude-sonnet-4-5-20250929", + name: "Claude Sonnet 4.5", + reasoning: true, + variants: { high: {}, max: {} }, + } as unknown as ProviderListItem["models"][string], + }, + { + ref: { providerID: "openai", modelID: "gpt-5" }, + title: "GPT-5", + description: "OpenAI", + isConnected: true, + model: { + id: "gpt-5", + name: "GPT-5", + reasoning: true, + variants: { none: {}, minimal: {}, low: {}, medium: {}, high: {}, xhigh: {} }, + } as unknown as ProviderListItem["models"][string], + }, + { + ref: { providerID: "deepseek", modelID: "deepseek-r1" }, + title: "DeepSeek R1", + description: "DeepSeek", + isConnected: true, + model: { + id: "deepseek-r1", + name: "DeepSeek R1", + reasoning: true, + variants: {}, + } as unknown as ProviderListItem["models"][string], + 
}, + { + ref: { providerID: "openrouter", modelID: "grok-4" }, + title: "Grok 4", + description: "OpenRouter", + isConnected: false, + model: { + id: "grok-4", + name: "Grok 4", + reasoning: false, + variants: {}, + } as unknown as ProviderListItem["models"][string], + }, +]; + +const mockShareFields = [ + { + label: "Worker URL", + value: "https://worker.openworklabs.com/opencode", + hint: "Paste this into Add worker -> Connect remote.", + }, + { + label: "Password", + value: "ow_story_worker_owner_password_7f9a1b3c", + secret: true, + hint: "Use when the remote client must answer permission prompts.", + }, + { + label: "Collaborator token", + value: "ow_story_worker_collab_token_1c4d2e8a", + secret: true, + hint: "Routine access when you do not need owner-only actions.", + }, +] as const; + +const initialSkills: SkillCard[] = [ + { + name: "workspace-guide", + path: ".opencode/skills/workspace-guide/SKILL.md", + description: "Guide users through workspace onboarding.", + trigger: "workspace guide", + }, + { + name: "design-review", + path: ".opencode/skills/design-review/SKILL.md", + description: "Run a design review pass over the current shell.", + trigger: "review this design", + }, + { + name: "test-flow", + path: ".opencode/skills/test-flow/SKILL.md", + description: "Exercise the Storybook flow before shipping.", + trigger: "test the flow", + }, +]; + +const initialSkillContents: Record = { + "workspace-guide": "# Workspace Guide\n\nHelp users connect a worker, open settings, and understand the shell layout.", + "design-review": "# Design Review\n\nReview the current screen for hierarchy, spacing, and state clarity.", + "test-flow": "# Test Flow\n\nRun the visible Storybook flow and note any interaction regressions.", +}; + +const initialHubRepo: HubSkillRepo = { + owner: "different-ai", + repo: "openwork-hub", + ref: "main", +}; + +const initialHubSkills: HubSkillCard[] = [ + { + name: "worker-smoke", + description: "Smoke-test remote worker setup and report 
any blockers.", + trigger: "worker smoke", + source: { owner: "different-ai", repo: "openwork-hub", ref: "main", path: "skills/worker-smoke/SKILL.md" }, + }, + { + name: "share-review", + description: "Review share links, field labeling, and copy clarity.", + trigger: "share review", + source: { owner: "different-ai", repo: "openwork-hub", ref: "main", path: "skills/share-review/SKILL.md" }, + }, +]; + +const initialMcpServers: McpServerEntry[] = [ + { + name: "notion", + config: { type: "remote", url: "https://mcp.notion.com/mcp", enabled: true }, + }, + { + name: "linear", + config: { type: "remote", url: "https://mcp.linear.app/mcp", enabled: true }, + }, + { + name: "chrome-devtools", + config: { type: "local", command: ["npx", "-y", "chrome-devtools-mcp@latest"], enabled: true }, + }, +]; + +function toMessageParts(id: string, role: "user" | "assistant", text: string): MessageWithParts { + return { + info: { + id, + sessionID: "story-shell-session", + role, + time: { created: Date.now() }, + } as MessageWithParts["info"], + parts: [{ type: "text", text } as MessageWithParts["parts"][number]], + }; +} + +export default function NewLayoutApp() { + const [selectedWorkspaceId, setSelectedWorkspaceId] = createSignal(localWorkspace.id); + const [selectedSessionId, setSelectedSessionId] = createSignal("sb-session-shell"); + + const [themeMode] = createSignal(getInitialThemeMode()); + const [composerPrompt, setComposerPrompt] = createSignal( + "Use this mock shell to design layout changes before touching the live session runtime.", + ); + const [composerToast, setComposerToast] = createSignal(null); + const [selectedAgent, setSelectedAgent] = createSignal(null); + const [selectedModel, setSelectedModel] = createSignal(storyModels[0].ref); + const [modelVariant, setModelVariant] = createSignal("medium"); + const [modelPickerOpen, setModelPickerOpen] = createSignal(false); + const [modelPickerTarget, setModelPickerTarget] = createSignal<"default" | 
"session">("session"); + const [modelPickerQuery, setModelPickerQuery] = createSignal(""); + const [createWorkspaceOpen, setCreateWorkspaceOpen] = createSignal(false); + const [createWorkspaceSubmitting, setCreateWorkspaceSubmitting] = createSignal(false); + const [mockFolderPickCount, setMockFolderPickCount] = createSignal(0); + const [agentPickerOpen, setAgentPickerOpen] = createSignal(false); + const [shareWorkspaceId, setShareWorkspaceId] = createSignal(null); + const [messageRows, setMessageRows] = createSignal(sessionMessages); + const [expandedStepIds, setExpandedStepIds] = createSignal(new Set()); + const [headerActionBusy, setHeaderActionBusy] = createSignal<"undo" | "redo" | "compact" | null>(null); + const [commandPaletteOpen, setCommandPaletteOpen] = createSignal(false); + const [commandPaletteMode, setCommandPaletteMode] = createSignal("root"); + const [commandPaletteQuery, setCommandPaletteQuery] = createSignal(""); + const [commandPaletteActiveIndex, setCommandPaletteActiveIndex] = createSignal(0); + let commandPaletteInputEl: HTMLInputElement | undefined; + const commandPaletteOptionRefs: HTMLButtonElement[] = []; + const [skills, setSkills] = createSignal(initialSkills); + const [skillContents, setSkillContents] = createSignal>(initialSkillContents); + const [hubRepo, setHubRepo] = createSignal(initialHubRepo); + const [hubRepos, setHubRepos] = createSignal([initialHubRepo]); + const [hubSkills] = createSignal(initialHubSkills); + const [pluginScope, setPluginScope] = createSignal("project"); + const [pluginInput, setPluginInput] = createSignal(""); + const [pluginList, setPluginList] = createSignal(["@openwork/browser-mcp"]); + const [pluginStatus, setPluginStatus] = createSignal("Sandbox plugin config loaded."); + const [activePluginGuide, setActivePluginGuide] = createSignal(null); + const [selectedMcp, setSelectedMcp] = createSignal("notion"); + const [storyMcpServers, setStoryMcpServers] = createSignal(initialMcpServers); + const 
[storyMcpStatuses, setStoryMcpStatuses] = createSignal(mcpStatuses); + const [messagingModuleEnabled, setMessagingModuleEnabled] = createSignal(true); + const [telegramIdentityRows, setTelegramIdentityRows] = createSignal([ + { id: "telegram-story", enabled: true, running: true, access: "private", pairingRequired: false }, + ]); + const [slackIdentityRows, setSlackIdentityRows] = createSignal([ + { id: "slack-story", enabled: true, running: true }, + ]); + const [routerAgentContent, setRouterAgentContent] = createSignal( + "# OpenCodeRouter Messaging Agent\n\nKeep responses concise and route follow-ups to the right worker.", + ); + const [routerAgentUpdatedAt, setRouterAgentUpdatedAt] = createSignal(now); + + const { + leftSidebarWidth, + startLeftSidebarResize, + } = createWorkspaceShellLayout({ expandedRightWidth: 320 }); + + createEffect(() => { + const mode = themeMode(); + persistThemeMode(mode); + applyThemeMode(mode); + }); + + createEffect(() => { + const unsubscribeSystemTheme = subscribeToSystemTheme(() => { + if (themeMode() === "system") { + applyThemeMode("system"); + } + }); + onCleanup(() => unsubscribeSystemTheme()); + }); + + const selectedSessionTitle = createMemo(() => { + const target = selectedSessionId(); + if (!target) return "New session"; + for (const group of workspaceSessionGroups) { + const found = group.sessions.find((session) => session.id === target); + if (found) return found.title; + } + return "New session"; + }); + const [showingSettings, setShowingSettings] = createSignal(false); + const [settingsTab, setSettingsTab] = createSignal("general"); + + const workspaceTabs = createMemo(() => [ + "general", + "model", + "skills", + "extensions", + "messaging", + "advanced", + ]); + const globalTabs = createMemo(() => ["den", "appearance", "updates", "recovery", "debug"]); + + const tabLabel = (tab: SettingsTab) => { + switch (tab) { + case "den": + return "Cloud"; + case "model": + return "Model"; + case "skills": + return "Skills"; + 
case "extensions": + return "Extensions"; + case "messaging": + return "Messaging"; + case "advanced": + return "Advanced"; + case "appearance": + return "Appearance"; + case "updates": + return "Updates"; + case "recovery": + return "Recovery"; + case "debug": + return "Debug"; + default: + return "General"; + } + }; + + const tabDescription = (tab: SettingsTab) => { + switch (tab) { + case "den": + return "Manage your OpenWork Cloud connection, hosted workers, and workspace access."; + case "model": + return "Tune the default model, runtime behavior, and assistant output settings."; + case "skills": + return "Browse skill surfaces and pinned shortcuts for this workspace."; + case "extensions": + return "Inspect extension-style integrations for this workspace shell."; + case "messaging": + return "Keep messaging and inbox tools inside workspace settings instead of the right rail."; + case "advanced": + return "Inspect runtime health, connection state, and developer-facing controls."; + case "appearance": + return "Adjust how OpenWork looks across desktop, system theme, and app chrome."; + case "updates": + return "Keep the app current with quiet background checks and install controls."; + case "recovery": + return "Repair migration state, reset workspace defaults, and recover local settings."; + case "debug": + return "Review runtime diagnostics, logs, and low-level debugging utilities."; + default: + return "Connect providers, authorize folders, and control the active OpenWork workspace."; + } + }; + + const settingsRailClass = "rounded-[24px] border border-dls-border bg-dls-sidebar p-3"; + const settingsPanelClass = "rounded-[28px] border border-dls-border bg-dls-surface p-5 md:p-6"; + const settingsPanelSoftClass = "rounded-2xl border border-gray-6/60 bg-gray-1/40 p-4"; + + const activeWorkspace = createMemo( + () => workspaceSessionGroups.find((group) => group.workspace.id === selectedWorkspaceId())?.workspace ?? 
localWorkspace, + ); + const shareWorkspace = createMemo( + () => storyWorkspaces.find((workspace) => workspace.id === shareWorkspaceId()) ?? null, + ); + const shareWorkspaceName = createMemo( + () => shareWorkspace()?.displayName?.trim() || shareWorkspace()?.name?.trim() || "Workspace", + ); + const shareWorkspaceDetail = createMemo(() => { + const workspace = shareWorkspace(); + if (!workspace) return null; + if (workspace.workspaceType === "remote") return workspace.baseUrl ?? workspace.path ?? null; + return workspace.path ?? null; + }); + const selectedWorkspaceRoot = createMemo(() => activeWorkspace().path?.trim() || ""); + + const refreshSkills = () => setComposerToast("Story-book: refreshed skills."); + const refreshHubSkills = () => setComposerToast("Story-book: refreshed hub skills."); + + const installSkillCreator = async () => { + if (!skills().some((skill) => skill.name === "skill-creator")) { + setSkills((current) => [ + ...current, + { + name: "skill-creator", + path: ".opencode/skills/skill-creator/SKILL.md", + description: "Create new skills from chat.", + trigger: "create a skill", + }, + ]); + setSkillContents((current) => ({ + ...current, + "skill-creator": "# Skill Creator\n\nCreate a new OpenCode skill from a prompt.", + })); + } + return { ok: true, message: "Installed skill-creator in the Storybook sandbox." }; + }; + + const installHubSkill = async (name: string) => { + const hubSkill = initialHubSkills.find((item) => item.name === name); + if (!hubSkill) return { ok: false, message: `Skill ${name} not found.` }; + if (!skills().some((skill) => skill.name === name)) { + setSkills((current) => [ + ...current, + { + name: hubSkill.name, + path: `.opencode/skills/${hubSkill.name}/SKILL.md`, + description: hubSkill.description, + trigger: hubSkill.trigger, + }, + ]); + setSkillContents((current) => ({ + ...current, + [hubSkill.name]: `# ${hubSkill.name}\n\n${hubSkill.description ?? 
"Imported from the hub."}`, + })); + } + return { ok: true, message: `Installed ${name} from the Storybook hub.` }; + }; + + const addHubRepo = (repo: Partial) => { + const next: HubSkillRepo = { + owner: repo.owner?.trim() || initialHubRepo.owner, + repo: repo.repo?.trim() || initialHubRepo.repo, + ref: repo.ref?.trim() || initialHubRepo.ref, + }; + setHubRepos((current) => { + if (current.some((item) => item.owner === next.owner && item.repo === next.repo && item.ref === next.ref)) { + return current; + } + return [...current, next]; + }); + setHubRepo(next); + }; + + const removeHubRepo = (repo: Partial) => { + setHubRepos((current) => + current.filter((item) => !(item.owner === repo.owner && item.repo === repo.repo && item.ref === repo.ref)), + ); + if (hubRepo()?.owner === repo.owner && hubRepo()?.repo === repo.repo && hubRepo()?.ref === repo.ref) { + setHubRepo(initialHubRepo); + } + }; + + const revealSkillsFolder = () => setComposerToast("Story-book: reveal skills folder is mocked."); + + const uninstallSkill = (name: string) => { + setSkills((current) => current.filter((skill) => skill.name !== name)); + setSkillContents((current) => { + const next = { ...current }; + delete next[name]; + return next; + }); + }; + + const readSkill = async (name: string) => { + const skill = skills().find((item) => item.name === name); + if (!skill) return null; + return { + name, + path: skill.path, + content: skillContents()[name] ?? `# ${name}\n\nStory-book skill content.`, + }; + }; + + const saveSkill = (input: { name: string; content: string; description?: string }) => { + const path = `.opencode/skills/${input.name}/SKILL.md`; + setSkills((current) => { + const existing = current.find((skill) => skill.name === input.name); + if (existing) { + return current.map((skill) => + skill.name === input.name + ? { ...skill, description: input.description ?? 
skill.description, path } + : skill, + ); + } + return [...current, { name: input.name, path, description: input.description }]; + }); + setSkillContents((current) => ({ ...current, [input.name]: input.content })); + }; + + const importLocalSkill = () => { + saveSkill({ + name: "imported-local-skill", + content: "# Imported Local Skill\n\nThis came from the Storybook sandbox import action.", + description: "Imported into the sandbox.", + }); + setComposerToast("Story-book: imported a local skill."); + }; + + const refreshPlugins = (scopeOverride?: PluginScope) => { + setPluginStatus(`Refreshed ${scopeOverride ?? pluginScope()} plugins in Storybook.`); + }; + + const isPluginInstalled = (name: string, aliases?: string[]) => { + const installed = new Set(pluginList()); + if (installed.has(name)) return true; + return (aliases ?? []).some((alias) => installed.has(alias)); + }; + + const addPlugin = (pluginNameOverride?: string) => { + const nextName = (pluginNameOverride ?? pluginInput()).trim(); + if (!nextName) return; + if (!isPluginInstalled(nextName)) { + setPluginList((current) => [...current, nextName]); + } + setPluginInput(""); + setPluginStatus(`${nextName} added in Storybook.`); + }; + + const removePlugin = (pluginName: string) => { + setPluginList((current) => current.filter((item) => item !== pluginName)); + setPluginStatus(`${pluginName} removed in Storybook.`); + }; + + const refreshMcpServers = () => setComposerToast("Story-book: refreshed MCP servers."); + + const connectMcp = (entry: (typeof MCP_QUICK_CONNECT)[number]) => { + const key = entry.id ?? entry.name.toLowerCase().replace(/[^a-z0-9]+/g, "-"); + if (!storyMcpServers().some((server) => server.name === key)) { + setStoryMcpServers((current) => [ + ...current, + { + name: key, + config: { + type: entry.type ?? "remote", + ...(entry.url ? { url: entry.url } : {}), + ...(entry.command ? 
{ command: entry.command } : {}), + enabled: true, + }, + }, + ]); + } + setStoryMcpStatuses((current) => ({ ...current, [key]: { status: entry.oauth ? "needs_auth" : "connected" } })); + setSelectedMcp(key); + }; + + const authorizeMcp = (entry: McpServerEntry) => { + setStoryMcpStatuses((current) => ({ ...current, [entry.name]: { status: "connected" } })); + }; + + const logoutMcpAuth = async (name: string) => { + setStoryMcpStatuses((current) => ({ ...current, [name]: { status: "needs_auth" } })); + }; + + const removeMcp = (name: string) => { + setStoryMcpServers((current) => current.filter((entry) => entry.name !== name)); + setStoryMcpStatuses((current) => { + const next = { ...current }; + delete next[name]; + return next; + }); + if (selectedMcp() === name) setSelectedMcp(null); + }; + + const buildRouterHealth = () => ({ + ok: true, + opencode: { + url: "https://worker.openworklabs.com/opencode", + healthy: true, + version: "0.1.0-story", + }, + channels: { + telegram: telegramIdentityRows().some((item) => item.enabled), + whatsapp: false, + slack: slackIdentityRows().some((item) => item.enabled), + }, + config: { + groupsEnabled: true, + }, + activity: { + dayStart: now - 12 * 60 * 60 * 1000, + inboundToday: 4, + outboundToday: 9, + lastMessageAt: now - 12 * 60 * 1000, + }, + agent: { + scope: "workspace" as const, + path: ".opencode/agents/opencode-router.md", + loaded: true, + selected: "openwork-router", + }, + }); + + const mockOpenworkServerClient = { + getConfig: async () => ({ openwork: { messaging: { enabled: messagingModuleEnabled() } } }), + getOpenCodeRouterHealth: async () => ({ ok: true, status: 200, json: buildRouterHealth() }), + getOpenCodeRouterTelegramIdentities: async () => ({ ok: true, items: telegramIdentityRows() }), + getOpenCodeRouterSlackIdentities: async () => ({ ok: true, items: slackIdentityRows() }), + getOpenCodeRouterTelegram: async () => ({ + ok: true, + configured: true, + enabled: messagingModuleEnabled(), + bot: { id: 1, 
username: "openwork_storybot", name: "OpenWork Story Bot" }, + }), + readWorkspaceFile: async (_workspaceId: string, path: string) => ({ + path, + content: routerAgentContent(), + bytes: routerAgentContent().length, + updatedAt: routerAgentUpdatedAt(), + }), + writeWorkspaceFile: async (_workspaceId: string, input: { path: string; content: string }) => { + const updatedAt = Date.now(); + setRouterAgentContent(input.content); + setRouterAgentUpdatedAt(updatedAt); + return { ok: true, path: input.path, bytes: input.content.length, updatedAt }; + }, + sendOpenCodeRouterMessage: async ( + _workspaceId: string, + input: { channel: string; text: string; directory?: string; peerId?: string }, + ) => ({ + ok: true, + channel: input.channel, + directory: input.directory ?? selectedWorkspaceRoot(), + peerId: input.peerId, + attempted: 1, + sent: 1, + reason: "Delivered by the Storybook sandbox client.", + }), + patchConfig: async ( + _workspaceId: string, + payload: { openwork?: { messaging?: { enabled?: boolean } } }, + ) => { + const enabled = payload.openwork?.messaging?.enabled; + if (typeof enabled === "boolean") setMessagingModuleEnabled(enabled); + return { ok: true }; + }, + upsertOpenCodeRouterTelegramIdentity: async ( + _workspaceId: string, + input: { enabled: boolean; access?: "public" | "private"; pairingRequired?: boolean }, + ) => { + const next = { + id: "telegram-story", + enabled: input.enabled, + running: true, + access: input.access ?? "private", + pairingRequired: input.pairingRequired ?? false, + }; + setTelegramIdentityRows([next]); + return { + ok: true, + telegram: { + id: next.id, + enabled: next.enabled, + access: next.access, + pairingRequired: next.pairingRequired, + pairingCode: next.pairingRequired ? 
"STORY42" : undefined, + bot: { id: 1, username: "openwork_storybot", name: "OpenWork Story Bot" }, + }, + }; + }, + deleteOpenCodeRouterTelegramIdentity: async (_workspaceId: string, identityId: string) => { + setTelegramIdentityRows((current) => current.filter((item) => item.id !== identityId)); + return { ok: true, telegram: { id: identityId, deleted: true } }; + }, + upsertOpenCodeRouterSlackIdentity: async (_workspaceId: string, input: { enabled: boolean }) => { + const next = { id: "slack-story", enabled: input.enabled, running: true }; + setSlackIdentityRows([next]); + return { ok: true, slack: { id: next.id, enabled: next.enabled } }; + }, + deleteOpenCodeRouterSlackIdentity: async (_workspaceId: string, identityId: string) => { + setSlackIdentityRows((current) => current.filter((item) => item.id !== identityId)); + return { ok: true, slack: { id: identityId, deleted: true } }; + }, + } as unknown as OpenworkServerClient; + + const agentLabel = createMemo(() => { + const name = selectedAgent() ?? "Default agent"; + return name.charAt(0).toUpperCase() + name.slice(1); + }); + + const selectedStoryModel = createMemo( + () => storyModels.find((entry) => entry.ref.providerID === selectedModel().providerID && entry.ref.modelID === selectedModel().modelID) + ?? storyModels[0], + ); + + const selectedBehavior = createMemo(() => + getModelBehaviorSummary( + selectedStoryModel().ref.providerID, + selectedStoryModel().model, + modelVariant(), + ), + ); + + const selectedModelLabel = createMemo(() => selectedStoryModel().title); + + const storyModelOptions = createMemo(() => + storyModels.map((entry) => { + const behavior = getModelBehaviorSummary(entry.ref.providerID, entry.model, modelVariant()); + return { + providerID: entry.ref.providerID, + modelID: entry.ref.modelID, + title: entry.title, + description: entry.description, + footer: entry.ref.providerID === selectedModel().providerID && entry.ref.modelID === selectedModel().modelID + ? 
"Current model" + : undefined, + behaviorTitle: behavior.title, + behaviorLabel: behavior.label, + behaviorDescription: behavior.description, + behaviorValue: sanitizeModelBehaviorValue(entry.ref.providerID, entry.model, modelVariant()), + behaviorOptions: behavior.options, + isFree: false, + isConnected: entry.isConnected, + isRecommended: entry.title.includes("GPT-5") || entry.title.includes("Claude") || entry.title.includes("DeepSeek"), + }; + }), + ); + + const filteredStoryModelOptions = createMemo(() => { + const query = modelPickerQuery().trim().toLowerCase(); + if (!query) return storyModelOptions(); + return storyModelOptions().filter((option) => + [ + option.title, + option.description ?? "", + option.footer ?? "", + option.behaviorTitle, + option.behaviorLabel, + option.behaviorDescription, + `${option.providerID}/${option.modelID}`, + ] + .join(" ") + .toLowerCase() + .includes(query), + ); + }); + + const openModelPicker = (target: "default" | "session" = "session") => { + setModelPickerTarget(target); + setModelPickerQuery(""); + setModelPickerOpen(true); + }; + + const openMockCreateWorkspaceModal = () => { + setCreateWorkspaceSubmitting(false); + setCreateWorkspaceOpen(true); + }; + + const pickMockWorkspaceFolder = async () => { + const folders = [ + "/Users/demo/OpenWork/client-foundation", + "/Users/demo/OpenWork/automation-lab", + "/Users/demo/OpenWork/starter-sandbox", + ]; + const next = folders[mockFolderPickCount() % folders.length] ?? 
folders[0]; + setMockFolderPickCount((count) => count + 1); + await new Promise((resolve) => window.setTimeout(resolve, 180)); + return next; + }; + + const confirmMockWorkspaceCreate = (preset: WorkspacePreset, folder: string | null) => { + if (!folder || createWorkspaceSubmitting()) return; + setCreateWorkspaceSubmitting(true); + window.setTimeout(() => { + setCreateWorkspaceSubmitting(false); + setCreateWorkspaceOpen(false); + setComposerToast(`Story-book: create workspace is mocked with preset \"${preset}\" at ${folder}.`); + }, 320); + }; + + const applyStoryModelSelection = (next: ModelRef) => { + const entry = storyModels.find((item) => item.ref.providerID === next.providerID && item.ref.modelID === next.modelID); + setSelectedModel(next); + if (entry) { + setModelVariant(sanitizeModelBehaviorValue(next.providerID, entry.model, modelVariant()) ?? null); + } + setModelPickerOpen(false); + }; + + const handleDraftChange = (draft: ComposerDraft) => { + setComposerPrompt(draft.text); + }; + + const handleSend = (draft: ComposerDraft) => { + const text = (draft.resolvedText ?? draft.text ?? "").trim(); + if (!text) return; + const nowStamp = Date.now(); + setMessageRows((current) => [ + ...current, + toMessageParts(`sb-user-${nowStamp}`, "user", text), + toMessageParts( + `sb-assistant-${nowStamp}`, + "assistant", + "Story-book mock response: message accepted. 
This uses app MessageList + Composer with local mock state.", + ), + ]); + setComposerPrompt(""); + }; + + const runMockHeaderAction = (action: "undo" | "redo" | "compact", label: string) => { + if (headerActionBusy()) return; + setHeaderActionBusy(action); + setComposerToast(`Story-book: ${label} is mocked in this shell.`); + window.setTimeout(() => setHeaderActionBusy(null), 240); + }; + + const openMockShareModal = (workspaceId?: string | null) => { + const nextId = workspaceId?.trim() || selectedWorkspaceId(); + setShareWorkspaceId(nextId); + }; + + const totalSessionCount = createMemo(() => + workspaceSessionGroups.reduce((count, group) => count + group.sessions.length, 0), + ); + + const commandPaletteSessionOptions = createMemo(() => { + const out: Array<{ + workspaceId: string; + sessionId: string; + title: string; + workspaceTitle: string; + updatedAt: number; + searchText: string; + }> = []; + + for (const group of workspaceSessionGroups) { + const workspaceId = group.workspace.id?.trim() ?? ""; + if (!workspaceId) continue; + const workspaceTitle = group.workspace.displayName?.trim() || group.workspace.name; + for (const session of group.sessions) { + const sessionId = session.id?.trim() ?? ""; + if (!sessionId) continue; + const title = session.title; + const updatedAt = session.time?.updated ?? session.time?.created ?? 
0; + out.push({ + workspaceId, + sessionId, + title, + workspaceTitle, + updatedAt, + searchText: `${title} ${workspaceTitle}`.toLowerCase(), + }); + } + } + + out.sort((a, b) => b.updatedAt - a.updatedAt); + return out; + }); + + const focusCommandPaletteInput = () => { + queueMicrotask(() => { + commandPaletteInputEl?.focus(); + commandPaletteInputEl?.select(); + }); + }; + + const openCommandPalette = (mode: CommandPaletteMode = "root") => { + setCommandPaletteMode(mode); + setCommandPaletteOpen(true); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }; + + const closeCommandPalette = () => { + setCommandPaletteOpen(false); + setCommandPaletteMode("root"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + }; + + const returnToCommandRoot = () => { + if (commandPaletteMode() === "root") return; + setCommandPaletteMode("root"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }; + + const commandPaletteRootItems = createMemo(() => { + const selectedTitle = selectedSessionTitle().trim() || "Give your selected session a clearer name"; + const items: CommandPaletteItem[] = [ + { + id: "new-session", + title: "Create new session", + detail: "Start a fresh task in the current workspace", + meta: "Create", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: create new session is mocked in this shell."); + }, + }, + { + id: "workspace", + title: "Create workspace", + detail: "Open the real workspace-creation modal in the shell", + meta: "Open", + action: () => { + closeCommandPalette(); + openMockCreateWorkspaceModal(); + }, + }, + { + id: "rename-session", + title: "Rename current session", + detail: selectedTitle, + meta: "Rename", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: rename session flow is mocked in this shell."); + }, + }, + { + id: "sessions", + title: "Search sessions", + detail: 
`${totalSessionCount().toLocaleString()} available across workspaces`, + meta: "Jump", + action: () => { + setCommandPaletteMode("sessions"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }, + }, + { + id: "model", + title: "Change model", + detail: `${selectedModelLabel()} · ${selectedBehavior().label}`, + meta: "Open", + action: () => { + closeCommandPalette(); + openModelPicker("session"); + }, + }, + { + id: "provider", + title: "Connect provider", + detail: "Open provider connection flow", + meta: "Open", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: provider connection flow is mocked in this shell."); + }, + }, + { + id: "settings", + title: "Open settings", + detail: "Show the real settings panel in the shell", + meta: "Open", + action: () => { + closeCommandPalette(); + setShowingSettings(true); + }, + }, + { + id: "share", + title: "Share current workspace", + detail: activeWorkspace().displayName ?? activeWorkspace().name, + meta: "Share", + action: () => { + closeCommandPalette(); + openMockShareModal(selectedWorkspaceId()); + }, + }, + ]; + + const query = commandPaletteQuery().trim().toLowerCase(); + if (!query) return items; + return items.filter((item) => `${item.title} ${item.detail ?? ""}`.toLowerCase().includes(query)); + }); + + const commandPaletteSessionItems = createMemo(() => { + const query = commandPaletteQuery().trim().toLowerCase(); + const candidates = query + ? commandPaletteSessionOptions().filter((item) => item.searchText.includes(query)) + : commandPaletteSessionOptions(); + + return candidates.slice(0, 80).map((item) => ({ + id: `session:${item.workspaceId}:${item.sessionId}`, + title: item.title, + detail: item.workspaceTitle, + meta: item.workspaceId === selectedWorkspaceId() ? 
"Current workspace" : "Switch", + action: () => { + closeCommandPalette(); + setSelectedWorkspaceId(item.workspaceId); + setSelectedSessionId(item.sessionId); + }, + })); + }); + + const commandPaletteItems = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return commandPaletteSessionItems(); + return commandPaletteRootItems(); + }); + + const commandPaletteTitle = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return "Search sessions"; + return "Quick actions"; + }); + + const commandPalettePlaceholder = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return "Find by session title or workspace"; + return "Search actions"; + }); + + const runCommandPaletteItem = (item: CommandPaletteItem) => { + closeCommandPalette(); + item.action(); + }; + + createEffect(() => { + const onKeyDown = (event: KeyboardEvent) => { + if ((event.metaKey || event.ctrlKey) && event.key.toLowerCase() === "k") { + event.preventDefault(); + if (commandPaletteOpen()) { + closeCommandPalette(); + } else { + openCommandPalette(); + } + return; + } + + if (!commandPaletteOpen()) return; + + if (event.key === "Escape") { + event.preventDefault(); + closeCommandPalette(); + return; + } + + if (event.key === "Backspace" && !commandPaletteQuery().trim() && commandPaletteMode() !== "root") { + event.preventDefault(); + returnToCommandRoot(); + return; + } + + const items = commandPaletteItems(); + if (event.key === "ArrowDown") { + event.preventDefault(); + if (!items.length) return; + setCommandPaletteActiveIndex((index) => (index + 1) % items.length); + return; + } + + if (event.key === "ArrowUp") { + event.preventDefault(); + if (!items.length) return; + setCommandPaletteActiveIndex((index) => (index - 1 + items.length) % items.length); + return; + } + + if (event.key === "Enter") { + event.preventDefault(); + const item = items[commandPaletteActiveIndex()]; + if (!item) return; + 
runCommandPaletteItem(item); + } + }; + window.addEventListener("keydown", onKeyDown); + onCleanup(() => window.removeEventListener("keydown", onKeyDown)); + }); + + createEffect(() => { + const items = commandPaletteItems(); + const index = commandPaletteActiveIndex(); + if (items.length === 0) { + setCommandPaletteActiveIndex(0); + return; + } + if (index >= items.length) { + setCommandPaletteActiveIndex(items.length - 1); + } + }); + + createEffect(() => { + if (!commandPaletteOpen()) return; + const index = commandPaletteActiveIndex(); + queueMicrotask(() => { + commandPaletteOptionRefs[index]?.scrollIntoView({ block: "nearest" }); + }); + }); + + createEffect(() => { + if (!commandPaletteOpen()) return; + commandPaletteMode(); + commandPaletteQuery(); + commandPaletteOptionRefs.length = 0; + setCommandPaletteActiveIndex(0); + }); + + + + return ( +
+
+ + +
+
+
+

+ {showingSettings() ? "Settings" : selectedSessionTitle()} +

+ + + +
+ +
+ + +
+ +
+
+
+ setExpandedStepIds((current) => updater(current))} + workspaceRoot="/Users/benjaminshafii/openwork-enterprise/_repos/openwork" + /> + } + > +
+ + +
+
+
+

+ {tabLabel(settingsTab())} +

+

+ {tabDescription(settingsTab())} +

+
+
+ + +
+ General settings view mocked here. +
+
+ + +
+ Skills (installed, team catalog, GitHub hub) runs in the full app with{" "} + ExtensionsProvider. + Use the OpenWork app shell to exercise this surface. +
+
+ + + undefined} + busy={false} + selectedWorkspaceRoot={selectedWorkspaceRoot()} + isRemoteWorkspace={activeWorkspace().workspaceType === "remote"} + refreshMcpServers={refreshMcpServers} + mcpServers={storyMcpServers()} + mcpStatus="Story-book MCP sandbox ready." + mcpLastUpdatedAt={now} + mcpConnectingName={null} + selectedMcp={selectedMcp()} + setSelectedMcp={setSelectedMcp} + quickConnect={MCP_QUICK_CONNECT} + connectMcp={connectMcp} + authorizeMcp={authorizeMcp} + logoutMcpAuth={logoutMcpAuth} + removeMcp={removeMcp} + showMcpReloadBanner={false} + reloadBlocked={false} + reloadMcpEngine={() => setComposerToast("Story-book: reloaded MCP engine.")} + canEditPlugins={true} + canUseGlobalScope={true} + accessHint={null} + pluginScope={pluginScope()} + setPluginScope={setPluginScope} + pluginConfigPath={`${selectedWorkspaceRoot() || "."}/opencode.json`} + pluginList={pluginList()} + pluginInput={pluginInput()} + setPluginInput={setPluginInput} + pluginStatus={pluginStatus()} + activePluginGuide={activePluginGuide()} + setActivePluginGuide={setActivePluginGuide} + isPluginInstalled={isPluginInstalled} + suggestedPlugins={SUGGESTED_PLUGINS} + refreshPlugins={refreshPlugins} + addPlugin={addPlugin} + removePlugin={removePlugin} + /> + + + +
+ true} + restartLocalServer={async () => true} + runtimeWorkspaceId={selectedWorkspaceId()} + selectedWorkspaceRoot={selectedWorkspaceRoot()} + developerMode + /> + +
+ Remote inbox preview has been removed from the app shell. +
+
+ +
+ Switch to the remote workspace to preview the inbox panel inside Messaging. +
+
+
+
+ + + true} + /> + + + +
+
+
+
Model preferences
+
+ This preview mirrors the default model and reasoning controls from the app picker. +
+
+
+
+
{selectedModelLabel()}
+
+ {selectedModel().providerID}/{selectedModel().modelID} +
+
+ +
+
+
{selectedBehavior().title}
+
{selectedBehavior().label}
+
{selectedBehavior().description}
+
+
+
+
+ + +
+ Advanced settings view mocked here. +
+
+ + +
+ Appearance settings view mocked here. +
+
+ + +
+ Updates settings view mocked here. +
+
+ + +
+ Recovery settings view mocked here. +
+
+ + +
+ Debug settings view mocked here. +
+
+
+
+
+
+
+
+ + + undefined} + onDraftChange={handleDraftChange} + selectedModelLabel={selectedModelLabel()} + onModelClick={() => openModelPicker("session")} + modelVariantLabel={`${selectedBehavior().title} · ${selectedBehavior().label}`} + modelVariant={modelVariant()} + modelBehaviorOptions={selectedBehavior().options} + onModelVariantChange={(value) => setModelVariant(value)} + agentLabel={agentLabel()} + selectedAgent={selectedAgent()} + agentPickerOpen={agentPickerOpen()} + agentPickerBusy={false} + agentPickerError={null} + agentOptions={[]} + onToggleAgentPicker={() => setAgentPickerOpen((current) => !current)} + onSelectAgent={(agent) => { + setSelectedAgent(agent); + setAgentPickerOpen(false); + }} + setAgentPickerRef={() => undefined} + notice={composerToast() ? { title: composerToast() } : null} + onNotice={(notice) => setComposerToast(notice.title)} + listAgents={async () => []} + recentFiles={workingFiles} + searchFiles={async (query) => { + const normalized = query.trim().toLowerCase(); + if (!normalized) return workingFiles.slice(0, 8); + return workingFiles.filter((path) => path.toLowerCase().includes(normalized)).slice(0, 8); + }} + isRemoteWorkspace={selectedWorkspaceId() === remoteWorkspace.id} + isSandboxWorkspace={selectedWorkspaceId() === remoteWorkspace.id} + attachmentsEnabled + attachmentsDisabledReason={null} + skills={[]} + listCommands={async () => commandOptions} + onOpenSettings={() => undefined} + /> + + + undefined} + onOpenSettings={() => { + setSettingsTab("general"); + setShowingSettings((prev) => !prev); + }} + providerConnectedIds={["anthropic", "openai"]} + statusLabel="Session Ready" + /> +
+
+ + +
+
event.stopPropagation()} + > +
+
+ + + + + (commandPaletteInputEl = el)} + type="text" + value={commandPaletteQuery()} + onInput={(event) => { + setCommandPaletteQuery(event.currentTarget.value); + setCommandPaletteActiveIndex(0); + }} + placeholder={commandPalettePlaceholder()} + class="min-w-0 flex-1 bg-transparent text-sm text-dls-text placeholder:text-dls-secondary focus:outline-none" + aria-label={commandPaletteTitle()} + /> + +
+
{commandPaletteTitle()}
+
+ +
+ 0} + fallback={
No matches.
} + > + + {(item, idx) => ( + + )} + +
+
+ +
+ Arrow keys to navigate + Enter to run · Esc to close +
+
+
+
+ + setShareWorkspaceId(null)} + workspaceName={shareWorkspaceName()} + workspaceDetail={shareWorkspaceDetail()} + fields={[...mockShareFields]} + note="This is the real share modal from the app, mounted with safe mock values for shell review." + onExportConfig={() => setComposerToast("Story-book: export config is mocked in this shell.")} + exportDisabledReason={null} + onOpenBots={() => setComposerToast("Story-book: bots sharing flow is mocked in this shell.")} + /> + + { + if (createWorkspaceSubmitting()) return; + setCreateWorkspaceOpen(false); + }} + onConfirm={confirmMockWorkspaceCreate} + onPickFolder={pickMockWorkspaceFolder} + submitting={createWorkspaceSubmitting()} + /> + + { + if (model.providerID !== selectedModel().providerID || model.modelID !== selectedModel().modelID) return; + const entry = storyModels.find((item) => item.ref.providerID === model.providerID && item.ref.modelID === model.modelID); + if (!entry) return; + setModelVariant(sanitizeModelBehaviorValue(model.providerID, entry.model, value) ?? null); + }} + onOpenSettings={() => { + setModelPickerOpen(false); + setShowingSettings(true); + }} + onClose={() => setModelPickerOpen(false)} + /> +
+ ); +} diff --git a/apps/story-book/src/story-book.tsx b/apps/story-book/src/story-book.tsx new file mode 100644 index 0000000000..4c7e9302c0 --- /dev/null +++ b/apps/story-book/src/story-book.tsx @@ -0,0 +1,1224 @@ +import { For, Show, createEffect, createMemo, createSignal, onCleanup } from "solid-js"; +import type { Component, JSX } from "solid-js"; +import { + Box, + ChevronLeft, + ChevronRight, + MessageCircle, + Redo2, + Search, + SlidersHorizontal, + Undo2, + X, + Zap, +} from "lucide-solid"; + +import Button from "../../app/src/app/components/button"; +import DenSettingsPanel from "../../app/src/app/components/den-settings-panel"; +import ModelPickerModal from "../../app/src/app/components/model-picker-modal"; +import StatusBar from "../../app/src/app/components/status-bar"; +import Composer from "../../app/src/app/components/session/composer"; +import MessageList from "../../app/src/app/components/session/message-list"; +import WorkspaceSessionList from "../../app/src/app/components/session/workspace-session-list"; +import { + CreateWorkspaceModal, + ShareWorkspaceModal, +} from "../../app/src/app/workspace"; +import { createWorkspaceShellLayout } from "../../app/src/app/lib/workspace-shell-layout"; +import { getModelBehaviorSummary, sanitizeModelBehaviorValue } from "../../app/src/app/lib/model-behavior"; +import { + applyThemeMode, + getInitialThemeMode, + persistThemeMode, + subscribeToSystemTheme, + type ThemeMode, +} from "../../app/src/app/theme"; +import type { + ComposerDraft, + McpStatusMap, + MessageWithParts, + ModelOption, + ModelRef, + ProviderListItem, + SlashCommandOption, + WorkspaceConnectionState, + WorkspacePreset, + WorkspaceSessionGroup, +} from "../../app/src/app/types"; +import { sessionMessages, storyWorkspaces } from "./mock-data"; + +type RightRailNav = "skills" | "extensions" | "messaging" | "advanced"; +type CommandPaletteMode = "root" | "sessions"; + +type CommandPaletteItem = { + id: string; + title: string; + detail?: 
string; + meta?: string; + action: () => void; +}; + +const localWorkspace = storyWorkspaces[0] ?? { + id: "local-foundation", + name: "Local Foundation", + displayName: "OpenWork App", + path: "~/OpenWork/app", + preset: "starter", + workspaceType: "local" as const, +}; + +const remoteWorkspace = storyWorkspaces[1] ?? { + id: "remote-worker", + name: "Remote Worker", + displayName: "Ops Worker", + path: "remote://ops-worker", + preset: "automation", + workspaceType: "remote" as const, + remoteType: "openwork" as const, + baseUrl: "https://worker.openworklabs.com/opencode", + openworkHostUrl: "https://worker.openworklabs.com", + openworkWorkspaceName: "Ops Worker", + sandboxBackend: "docker" as const, + sandboxContainerName: "openwork-ops-worker", +}; + +const now = Date.now(); + +const workspaceSessionGroups: WorkspaceSessionGroup[] = [ + { + workspace: localWorkspace, + status: "ready", + sessions: [ + { + id: "sb-session-shell", + title: "Story shell parity with session.tsx", + slug: "story-shell-parity", + time: { updated: now - 2 * 60 * 1000, created: now - 22 * 60 * 1000 }, + }, + { + id: "sb-session-provider", + title: "Provider states and status rail", + slug: "provider-states", + time: { updated: now - 18 * 60 * 1000, created: now - 56 * 60 * 1000 }, + }, + { + id: "sb-session-mobile", + title: "Mobile shell spacing pass", + slug: "mobile-shell-pass", + time: { updated: now - 56 * 60 * 1000, created: now - 3 * 60 * 60 * 1000 }, + }, + ], + }, + { + workspace: remoteWorkspace, + status: "ready", + sessions: [ + { + id: "sb-session-remote", + title: "Remote worker onboarding", + slug: "remote-worker-onboarding", + time: { updated: now - 7 * 60 * 1000, created: now - 2 * 60 * 60 * 1000 }, + }, + { + id: "sb-session-inbox", + title: "Inbox upload behavior", + slug: "inbox-upload", + time: { updated: now - 35 * 60 * 1000, created: now - 6 * 60 * 60 * 1000 }, + }, + ], + }, +]; + +const workspaceConnectionStateById: Record = { + [localWorkspace.id]: { status: 
"connected", message: "Local engine ready" }, + [remoteWorkspace.id]: { status: "connected", message: "Connected via token" }, +}; + +const sessionStatusById: Record = { + "sb-session-shell": "running", + "sb-session-provider": "idle", + "sb-session-mobile": "idle", + "sb-session-remote": "idle", + "sb-session-inbox": "idle", +}; + +const mcpStatuses: McpStatusMap = { + browser: { status: "connected" }, + notion: { status: "connected" }, + linear: { status: "needs_auth" }, +}; + +const workingFiles = [ + "apps/story-book/src/story-book.tsx", + "apps/app/src/app/pages/session.tsx", + "apps/app/src/app/components/session/workspace-session-list.tsx", + "apps/app/src/app/components/session/inbox-panel.tsx", +]; + +const commandOptions: SlashCommandOption[] = [ + { id: "design-review", name: "design-review", description: "Open a design review pass", source: "command" }, + { id: "test-flow", name: "test-flow", description: "Run shell flow checks", source: "skill" }, +]; + +const storyModels: Array<{ + ref: ModelRef; + title: string; + description: string; + isConnected: boolean; + model: ProviderListItem["models"][string]; +}> = [ + { + ref: { providerID: "anthropic", modelID: "claude-sonnet-4-5-20250929" }, + title: "Claude Sonnet 4.5", + description: "Anthropic", + isConnected: true, + model: { + id: "claude-sonnet-4-5-20250929", + name: "Claude Sonnet 4.5", + reasoning: true, + variants: { high: {}, max: {} }, + } as unknown as ProviderListItem["models"][string], + }, + { + ref: { providerID: "openai", modelID: "gpt-5" }, + title: "GPT-5", + description: "OpenAI", + isConnected: true, + model: { + id: "gpt-5", + name: "GPT-5", + reasoning: true, + variants: { none: {}, minimal: {}, low: {}, medium: {}, high: {}, xhigh: {} }, + } as unknown as ProviderListItem["models"][string], + }, + { + ref: { providerID: "deepseek", modelID: "deepseek-r1" }, + title: "DeepSeek R1", + description: "DeepSeek", + isConnected: true, + model: { + id: "deepseek-r1", + name: "DeepSeek 
R1", + reasoning: true, + variants: {}, + } as unknown as ProviderListItem["models"][string], + }, + { + ref: { providerID: "openrouter", modelID: "grok-4" }, + title: "Grok 4", + description: "OpenRouter", + isConnected: false, + model: { + id: "grok-4", + name: "Grok 4", + reasoning: false, + variants: {}, + } as unknown as ProviderListItem["models"][string], + }, +]; + +const mockShareFields = [ + { + label: "Worker URL", + value: "https://worker.openworklabs.com/opencode", + hint: "Paste this into Add worker -> Connect remote.", + }, + { + label: "Password", + value: "ow_story_worker_owner_password_7f9a1b3c", + secret: true, + hint: "Use when the remote client must answer permission prompts.", + }, + { + label: "Collaborator token", + value: "ow_story_worker_collab_token_1c4d2e8a", + secret: true, + hint: "Routine access when you do not need owner-only actions.", + }, +] as const; + +function toMessageParts(id: string, role: "user" | "assistant", text: string): MessageWithParts { + return { + info: { + id, + sessionID: "story-shell-session", + role, + time: { created: Date.now() }, + } as MessageWithParts["info"], + parts: [{ type: "text", text } as MessageWithParts["parts"][number]], + }; +} + +const RightRailButton: Component<{ + label: string; + icon: JSX.Element; + active: boolean; + expanded: boolean; + onClick: () => void; +}> = (props) => ( + +); + +export default function StoryBookApp() { + const [selectedWorkspaceId, setSelectedWorkspaceId] = createSignal(localWorkspace.id); + const [selectedSessionId, setSelectedSessionId] = createSignal("sb-session-shell"); + const [rightRailNav, setRightRailNav] = createSignal("skills"); + const [themeMode] = createSignal(getInitialThemeMode()); + const [composerPrompt, setComposerPrompt] = createSignal( + "Use this mock shell to design layout changes before touching the live session runtime.", + ); + const [composerToast, setComposerToast] = createSignal(null); + const [selectedAgent, setSelectedAgent] = 
createSignal(null); + const [selectedModel, setSelectedModel] = createSignal(storyModels[0].ref); + const [modelVariant, setModelVariant] = createSignal("medium"); + const [modelPickerOpen, setModelPickerOpen] = createSignal(false); + const [modelPickerTarget, setModelPickerTarget] = createSignal<"default" | "session">("session"); + const [modelPickerQuery, setModelPickerQuery] = createSignal(""); + const [createWorkspaceOpen, setCreateWorkspaceOpen] = createSignal(false); + const [createWorkspaceSubmitting, setCreateWorkspaceSubmitting] = createSignal(false); + const [mockFolderPickCount, setMockFolderPickCount] = createSignal(0); + const [agentPickerOpen, setAgentPickerOpen] = createSignal(false); + const [shareWorkspaceId, setShareWorkspaceId] = createSignal(null); + const [messageRows, setMessageRows] = createSignal(sessionMessages); + const [expandedStepIds, setExpandedStepIds] = createSignal(new Set()); + const [headerActionBusy, setHeaderActionBusy] = createSignal<"undo" | "redo" | "compact" | null>(null); + const [commandPaletteOpen, setCommandPaletteOpen] = createSignal(false); + const [commandPaletteMode, setCommandPaletteMode] = createSignal("root"); + const [commandPaletteQuery, setCommandPaletteQuery] = createSignal(""); + const [commandPaletteActiveIndex, setCommandPaletteActiveIndex] = createSignal(0); + let commandPaletteInputEl: HTMLInputElement | undefined; + const commandPaletteOptionRefs: HTMLButtonElement[] = []; + + const { + leftSidebarWidth, + rightSidebarExpanded, + rightSidebarWidth, + startLeftSidebarResize, + toggleRightSidebar, + } = createWorkspaceShellLayout({ expandedRightWidth: 320 }); + + createEffect(() => { + const mode = themeMode(); + persistThemeMode(mode); + applyThemeMode(mode); + }); + + createEffect(() => { + const unsubscribeSystemTheme = subscribeToSystemTheme(() => { + if (themeMode() === "system") { + applyThemeMode("system"); + } + }); + onCleanup(() => unsubscribeSystemTheme()); + }); + + const selectedSessionTitle = 
createMemo(() => { + const target = selectedSessionId(); + if (!target) return "New session"; + for (const group of workspaceSessionGroups) { + const found = group.sessions.find((session) => session.id === target); + if (found) return found.title; + } + return "New session"; + }); + const showingSettings = createMemo(() => rightRailNav() === "advanced"); + const activeWorkspace = createMemo( + () => workspaceSessionGroups.find((group) => group.workspace.id === selectedWorkspaceId())?.workspace ?? localWorkspace, + ); + const shareWorkspace = createMemo( + () => storyWorkspaces.find((workspace) => workspace.id === shareWorkspaceId()) ?? null, + ); + const shareWorkspaceName = createMemo( + () => shareWorkspace()?.displayName?.trim() || shareWorkspace()?.name?.trim() || "Workspace", + ); + const shareWorkspaceDetail = createMemo(() => { + const workspace = shareWorkspace(); + if (!workspace) return null; + if (workspace.workspaceType === "remote") return workspace.baseUrl ?? workspace.path ?? null; + return workspace.path ?? null; + }); + + const agentLabel = createMemo(() => { + const name = selectedAgent() ?? "Default agent"; + return name.charAt(0).toUpperCase() + name.slice(1); + }); + + const selectedStoryModel = createMemo( + () => storyModels.find((entry) => entry.ref.providerID === selectedModel().providerID && entry.ref.modelID === selectedModel().modelID) + ?? 
storyModels[0], + ); + + const selectedBehavior = createMemo(() => + getModelBehaviorSummary( + selectedStoryModel().ref.providerID, + selectedStoryModel().model, + modelVariant(), + ), + ); + + const selectedModelLabel = createMemo(() => selectedStoryModel().title); + + const storyModelOptions = createMemo(() => + storyModels.map((entry) => { + const behavior = getModelBehaviorSummary(entry.ref.providerID, entry.model, modelVariant()); + return { + providerID: entry.ref.providerID, + modelID: entry.ref.modelID, + title: entry.title, + description: entry.description, + footer: entry.ref.providerID === selectedModel().providerID && entry.ref.modelID === selectedModel().modelID + ? "Current model" + : undefined, + behaviorTitle: behavior.title, + behaviorLabel: behavior.label, + behaviorDescription: behavior.description, + behaviorValue: sanitizeModelBehaviorValue(entry.ref.providerID, entry.model, modelVariant()), + behaviorOptions: behavior.options, + isFree: false, + isConnected: entry.isConnected, + isRecommended: entry.title.includes("GPT-5") || entry.title.includes("Claude") || entry.title.includes("DeepSeek"), + }; + }), + ); + + const filteredStoryModelOptions = createMemo(() => { + const query = modelPickerQuery().trim().toLowerCase(); + if (!query) return storyModelOptions(); + return storyModelOptions().filter((option) => + [ + option.title, + option.description ?? "", + option.footer ?? 
"", + option.behaviorTitle, + option.behaviorLabel, + option.behaviorDescription, + `${option.providerID}/${option.modelID}`, + ] + .join(" ") + .toLowerCase() + .includes(query), + ); + }); + + const openModelPicker = (target: "default" | "session" = "session") => { + setModelPickerTarget(target); + setModelPickerQuery(""); + setModelPickerOpen(true); + }; + + const openMockCreateWorkspaceModal = () => { + setCreateWorkspaceSubmitting(false); + setCreateWorkspaceOpen(true); + }; + + const pickMockWorkspaceFolder = async () => { + const folders = [ + "/Users/demo/OpenWork/client-foundation", + "/Users/demo/OpenWork/automation-lab", + "/Users/demo/OpenWork/starter-sandbox", + ]; + const next = folders[mockFolderPickCount() % folders.length] ?? folders[0]; + setMockFolderPickCount((count) => count + 1); + await new Promise((resolve) => window.setTimeout(resolve, 180)); + return next; + }; + + const confirmMockWorkspaceCreate = (preset: WorkspacePreset, folder: string | null) => { + if (!folder || createWorkspaceSubmitting()) return; + setCreateWorkspaceSubmitting(true); + window.setTimeout(() => { + setCreateWorkspaceSubmitting(false); + setCreateWorkspaceOpen(false); + setComposerToast(`Story-book: create workspace is mocked with preset \"${preset}\" at ${folder}.`); + }, 320); + }; + + const applyStoryModelSelection = (next: ModelRef) => { + const entry = storyModels.find((item) => item.ref.providerID === next.providerID && item.ref.modelID === next.modelID); + setSelectedModel(next); + if (entry) { + setModelVariant(sanitizeModelBehaviorValue(next.providerID, entry.model, modelVariant()) ?? null); + } + setModelPickerOpen(false); + }; + + const handleDraftChange = (draft: ComposerDraft) => { + setComposerPrompt(draft.text); + }; + + const handleSend = (draft: ComposerDraft) => { + const text = (draft.resolvedText ?? draft.text ?? 
"").trim(); + if (!text) return; + const nowStamp = Date.now(); + setMessageRows((current) => [ + ...current, + toMessageParts(`sb-user-${nowStamp}`, "user", text), + toMessageParts( + `sb-assistant-${nowStamp}`, + "assistant", + "Story-book mock response: message accepted. This uses app MessageList + Composer with local mock state.", + ), + ]); + setComposerPrompt(""); + }; + + const runMockHeaderAction = (action: "undo" | "redo" | "compact", label: string) => { + if (headerActionBusy()) return; + setHeaderActionBusy(action); + setComposerToast(`Story-book: ${label} is mocked in this shell.`); + window.setTimeout(() => setHeaderActionBusy(null), 240); + }; + + const openMockShareModal = (workspaceId?: string | null) => { + const nextId = workspaceId?.trim() || selectedWorkspaceId(); + setShareWorkspaceId(nextId); + }; + + const totalSessionCount = createMemo(() => + workspaceSessionGroups.reduce((count, group) => count + group.sessions.length, 0), + ); + + const commandPaletteSessionOptions = createMemo(() => { + const out: Array<{ + workspaceId: string; + sessionId: string; + title: string; + workspaceTitle: string; + updatedAt: number; + searchText: string; + }> = []; + + for (const group of workspaceSessionGroups) { + const workspaceId = group.workspace.id?.trim() ?? ""; + if (!workspaceId) continue; + const workspaceTitle = group.workspace.displayName?.trim() || group.workspace.name; + for (const session of group.sessions) { + const sessionId = session.id?.trim() ?? ""; + if (!sessionId) continue; + const title = session.title; + const updatedAt = session.time?.updated ?? session.time?.created ?? 
0; + out.push({ + workspaceId, + sessionId, + title, + workspaceTitle, + updatedAt, + searchText: `${title} ${workspaceTitle}`.toLowerCase(), + }); + } + } + + out.sort((a, b) => b.updatedAt - a.updatedAt); + return out; + }); + + const focusCommandPaletteInput = () => { + queueMicrotask(() => { + commandPaletteInputEl?.focus(); + commandPaletteInputEl?.select(); + }); + }; + + const openCommandPalette = (mode: CommandPaletteMode = "root") => { + setCommandPaletteMode(mode); + setCommandPaletteOpen(true); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }; + + const closeCommandPalette = () => { + setCommandPaletteOpen(false); + setCommandPaletteMode("root"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + }; + + const returnToCommandRoot = () => { + if (commandPaletteMode() === "root") return; + setCommandPaletteMode("root"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }; + + const commandPaletteRootItems = createMemo(() => { + const selectedTitle = selectedSessionTitle().trim() || "Give your selected session a clearer name"; + const items: CommandPaletteItem[] = [ + { + id: "new-session", + title: "Create new session", + detail: "Start a fresh task in the current workspace", + meta: "Create", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: create new session is mocked in this shell."); + }, + }, + { + id: "workspace", + title: "Create workspace", + detail: "Open the real workspace-creation modal in the shell", + meta: "Open", + action: () => { + closeCommandPalette(); + openMockCreateWorkspaceModal(); + }, + }, + { + id: "rename-session", + title: "Rename current session", + detail: selectedTitle, + meta: "Rename", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: rename session flow is mocked in this shell."); + }, + }, + { + id: "sessions", + title: "Search sessions", + detail: 
`${totalSessionCount().toLocaleString()} available across workspaces`, + meta: "Jump", + action: () => { + setCommandPaletteMode("sessions"); + setCommandPaletteQuery(""); + setCommandPaletteActiveIndex(0); + focusCommandPaletteInput(); + }, + }, + { + id: "model", + title: "Change model", + detail: `${selectedModelLabel()} · ${selectedBehavior().label}`, + meta: "Open", + action: () => { + closeCommandPalette(); + openModelPicker("session"); + }, + }, + { + id: "provider", + title: "Connect provider", + detail: "Open provider connection flow", + meta: "Open", + action: () => { + closeCommandPalette(); + setComposerToast("Story-book: provider connection flow is mocked in this shell."); + }, + }, + { + id: "settings", + title: "Open settings", + detail: "Show the real settings panel in the shell", + meta: "Open", + action: () => { + closeCommandPalette(); + if (!rightSidebarExpanded()) toggleRightSidebar(); + setRightRailNav("advanced"); + }, + }, + { + id: "share", + title: "Share current workspace", + detail: activeWorkspace().displayName ?? activeWorkspace().name, + meta: "Share", + action: () => { + closeCommandPalette(); + openMockShareModal(selectedWorkspaceId()); + }, + }, + ]; + + const query = commandPaletteQuery().trim().toLowerCase(); + if (!query) return items; + return items.filter((item) => `${item.title} ${item.detail ?? ""}`.toLowerCase().includes(query)); + }); + + const commandPaletteSessionItems = createMemo(() => { + const query = commandPaletteQuery().trim().toLowerCase(); + const candidates = query + ? commandPaletteSessionOptions().filter((item) => item.searchText.includes(query)) + : commandPaletteSessionOptions(); + + return candidates.slice(0, 80).map((item) => ({ + id: `session:${item.workspaceId}:${item.sessionId}`, + title: item.title, + detail: item.workspaceTitle, + meta: item.workspaceId === selectedWorkspaceId() ? 
"Current workspace" : "Switch", + action: () => { + closeCommandPalette(); + setSelectedWorkspaceId(item.workspaceId); + setSelectedSessionId(item.sessionId); + }, + })); + }); + + const commandPaletteItems = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return commandPaletteSessionItems(); + return commandPaletteRootItems(); + }); + + const commandPaletteTitle = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return "Search sessions"; + return "Quick actions"; + }); + + const commandPalettePlaceholder = createMemo(() => { + const mode = commandPaletteMode(); + if (mode === "sessions") return "Find by session title or workspace"; + return "Search actions"; + }); + + const runCommandPaletteItem = (item: CommandPaletteItem) => { + closeCommandPalette(); + item.action(); + }; + + createEffect(() => { + const onKeyDown = (event: KeyboardEvent) => { + if ((event.metaKey || event.ctrlKey) && event.key.toLowerCase() === "k") { + event.preventDefault(); + if (commandPaletteOpen()) { + closeCommandPalette(); + } else { + openCommandPalette(); + } + return; + } + + if (!commandPaletteOpen()) return; + + if (event.key === "Escape") { + event.preventDefault(); + closeCommandPalette(); + return; + } + + if (event.key === "Backspace" && !commandPaletteQuery().trim() && commandPaletteMode() !== "root") { + event.preventDefault(); + returnToCommandRoot(); + return; + } + + const items = commandPaletteItems(); + if (event.key === "ArrowDown") { + event.preventDefault(); + if (!items.length) return; + setCommandPaletteActiveIndex((index) => (index + 1) % items.length); + return; + } + + if (event.key === "ArrowUp") { + event.preventDefault(); + if (!items.length) return; + setCommandPaletteActiveIndex((index) => (index - 1 + items.length) % items.length); + return; + } + + if (event.key === "Enter") { + event.preventDefault(); + const item = items[commandPaletteActiveIndex()]; + if (!item) return; + 
runCommandPaletteItem(item); + } + }; + window.addEventListener("keydown", onKeyDown); + onCleanup(() => window.removeEventListener("keydown", onKeyDown)); + }); + + createEffect(() => { + const items = commandPaletteItems(); + const index = commandPaletteActiveIndex(); + if (items.length === 0) { + setCommandPaletteActiveIndex(0); + return; + } + if (index >= items.length) { + setCommandPaletteActiveIndex(items.length - 1); + } + }); + + createEffect(() => { + if (!commandPaletteOpen()) return; + const index = commandPaletteActiveIndex(); + queueMicrotask(() => { + commandPaletteOptionRefs[index]?.scrollIntoView({ block: "nearest" }); + }); + }); + + createEffect(() => { + if (!commandPaletteOpen()) return; + commandPaletteMode(); + commandPaletteQuery(); + commandPaletteOptionRefs.length = 0; + setCommandPaletteActiveIndex(0); + }); + + const renderRightRail = (expanded: boolean) => ( +
+
+ +
+ +
+
+ } + active={rightRailNav() === "skills"} + expanded={expanded} + onClick={() => setRightRailNav("skills")} + /> + } + active={rightRailNav() === "extensions"} + expanded={expanded} + onClick={() => setRightRailNav("extensions")} + /> + } + active={rightRailNav() === "messaging"} + expanded={expanded} + onClick={() => setRightRailNav("messaging")} + /> + } + active={rightRailNav() === "advanced"} + expanded={expanded} + onClick={() => setRightRailNav("advanced")} + /> +
+ + +
+ Remote inbox preview has been removed from the app shell. +
+
+
+
+ ); + + return ( +
+
+ + +
+
+
+

+ {showingSettings() ? "Settings" : selectedSessionTitle()} +

+ +
+ +
+ + +
+ +
+
+
+ setExpandedStepIds((current) => updater(current))} + workspaceRoot="/Users/benjaminshafii/openwork-enterprise/_repos/openwork" + /> + } + > +
+
+
+
Model preferences
+
+ This preview mirrors the default model and reasoning controls from the app picker. +
+
+
+
+
{selectedModelLabel()}
+
+ {selectedModel().providerID}/{selectedModel().modelID} +
+
+ +
+
+
{selectedBehavior().title}
+
{selectedBehavior().label}
+
{selectedBehavior().description}
+
+
+
+ This is the real `DenSettingsPanel` from the app mounted inside story-book. +
+ true} + /> +
+
+
+
+
+ + + undefined} + onDraftChange={handleDraftChange} + selectedModelLabel={selectedModelLabel()} + onModelClick={() => openModelPicker("session")} + modelVariantLabel={`${selectedBehavior().title} · ${selectedBehavior().label}`} + modelVariant={modelVariant()} + modelBehaviorOptions={selectedBehavior().options} + onModelVariantChange={(value) => setModelVariant(value)} + agentLabel={agentLabel()} + selectedAgent={selectedAgent()} + agentPickerOpen={agentPickerOpen()} + agentPickerBusy={false} + agentPickerError={null} + agentOptions={[]} + onToggleAgentPicker={() => setAgentPickerOpen((current) => !current)} + onSelectAgent={(agent) => { + setSelectedAgent(agent); + setAgentPickerOpen(false); + }} + setAgentPickerRef={() => undefined} + notice={composerToast() ? { title: composerToast() } : null} + onNotice={(notice) => setComposerToast(notice.title)} + listAgents={async () => []} + recentFiles={workingFiles} + searchFiles={async (query) => { + const normalized = query.trim().toLowerCase(); + if (!normalized) return workingFiles.slice(0, 8); + return workingFiles.filter((path) => path.toLowerCase().includes(normalized)).slice(0, 8); + }} + isRemoteWorkspace={selectedWorkspaceId() === remoteWorkspace.id} + isSandboxWorkspace={selectedWorkspaceId() === remoteWorkspace.id} + attachmentsEnabled + attachmentsDisabledReason={null} + skills={[]} + listCommands={async () => commandOptions} + onOpenSettings={() => undefined} + /> + + + undefined} + onOpenSettings={() => { + if (showingSettings()) { + setRightRailNav("skills"); + return; + } + if (!rightSidebarExpanded()) toggleRightSidebar(); + setRightRailNav("advanced"); + }} + providerConnectedIds={["anthropic", "openai"]} + statusLabel="Session Ready" + /> +
+ + +
+ + +
+
event.stopPropagation()} + > +
+
+ + + + + (commandPaletteInputEl = el)} + type="text" + value={commandPaletteQuery()} + onInput={(event) => { + setCommandPaletteQuery(event.currentTarget.value); + setCommandPaletteActiveIndex(0); + }} + placeholder={commandPalettePlaceholder()} + class="min-w-0 flex-1 bg-transparent text-sm text-dls-text placeholder:text-dls-secondary focus:outline-none" + aria-label={commandPaletteTitle()} + /> + +
+
{commandPaletteTitle()}
+
+ +
+ 0} + fallback={
No matches.
} + > + + {(item, idx) => ( + + )} + +
+
+ +
+ Arrow keys to navigate + Enter to run · Esc to close +
+
+
+
+ + setShareWorkspaceId(null)} + workspaceName={shareWorkspaceName()} + workspaceDetail={shareWorkspaceDetail()} + fields={[...mockShareFields]} + note="This is the real share modal from the app, mounted with safe mock values for shell review." + onExportConfig={() => setComposerToast("Story-book: export config is mocked in this shell.")} + exportDisabledReason={null} + onOpenBots={() => setComposerToast("Story-book: bots sharing flow is mocked in this shell.")} + /> + + { + if (createWorkspaceSubmitting()) return; + setCreateWorkspaceOpen(false); + }} + onConfirm={confirmMockWorkspaceCreate} + onPickFolder={pickMockWorkspaceFolder} + submitting={createWorkspaceSubmitting()} + /> + + { + if (model.providerID !== selectedModel().providerID || model.modelID !== selectedModel().modelID) return; + const entry = storyModels.find((item) => item.ref.providerID === model.providerID && item.ref.modelID === model.modelID); + if (!entry) return; + setModelVariant(sanitizeModelBehaviorValue(model.providerID, entry.model, value) ?? null); + }} + onOpenSettings={() => { + setModelPickerOpen(false); + if (!rightSidebarExpanded()) toggleRightSidebar(); + setRightRailNav("advanced"); + }} + onClose={() => setModelPickerOpen(false)} + /> +
+ ); +} diff --git a/apps/story-book/tsconfig.json b/apps/story-book/tsconfig.json new file mode 100644 index 0000000000..d33d2a1b33 --- /dev/null +++ b/apps/story-book/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "../app/tsconfig.json", + "include": ["src", "../app/src", "vite.config.ts"] +} diff --git a/apps/story-book/vite.config.ts b/apps/story-book/vite.config.ts new file mode 100644 index 0000000000..30e3028887 --- /dev/null +++ b/apps/story-book/vite.config.ts @@ -0,0 +1,37 @@ +import os from "node:os"; +import { defineConfig } from "vite"; +import tailwindcss from "@tailwindcss/vite"; +import solid from "vite-plugin-solid"; + +const portValue = Number.parseInt(process.env.PORT ?? "", 10); +const devPort = Number.isFinite(portValue) && portValue > 0 ? portValue : 5176; +const allowedHosts = new Set(); +const envAllowedHosts = process.env.VITE_ALLOWED_HOSTS ?? ""; + +const addHost = (value?: string | null) => { + const trimmed = value?.trim(); + if (!trimmed) return; + allowedHosts.add(trimmed); +}; + +envAllowedHosts.split(",").forEach(addHost); +addHost(process.env.OPENWORK_PUBLIC_HOST ?? null); +const hostname = os.hostname(); +addHost(hostname); +const shortHostname = hostname.split(".")[0]; +if (shortHostname && shortHostname !== hostname) { + addHost(shortHostname); +} + +export default defineConfig({ + publicDir: "../app/public", + plugins: [tailwindcss(), solid()], + server: { + port: devPort, + strictPort: true, + ...(allowedHosts.size > 0 ? { allowedHosts: Array.from(allowedHosts) } : {}), + }, + build: { + target: "esnext", + }, +}); diff --git a/apps/ui-demo/index.html b/apps/ui-demo/index.html new file mode 100644 index 0000000000..66f28a9a46 --- /dev/null +++ b/apps/ui-demo/index.html @@ -0,0 +1,12 @@ + + + + + + OpenWork UI Demo + + +
+ + + diff --git a/apps/ui-demo/package.json b/apps/ui-demo/package.json new file mode 100644 index 0000000000..2cb9bde604 --- /dev/null +++ b/apps/ui-demo/package.json @@ -0,0 +1,24 @@ +{ + "name": "@openwork/ui-demo", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "pnpm --dir ../../packages/ui build && vite --host 0.0.0.0 --port 3333 --strictPort", + "build": "pnpm --dir ../../packages/ui build && vite build", + "preview": "vite preview --host 0.0.0.0 --port 3333 --strictPort", + "typecheck": "pnpm --dir ../../packages/ui build && tsc -p tsconfig.json --noEmit" + }, + "dependencies": { + "@openwork/ui": "workspace:*", + "react": "19.2.4", + "react-dom": "19.2.4" + }, + "devDependencies": { + "@types/react": "19.2.14", + "@types/react-dom": "19.2.3", + "@vitejs/plugin-react": "^5.0.4", + "typescript": "^5.9.3", + "vite": "^7.1.12" + } +} diff --git a/apps/ui-demo/src/app.tsx b/apps/ui-demo/src/app.tsx new file mode 100644 index 0000000000..76a3ff7273 --- /dev/null +++ b/apps/ui-demo/src/app.tsx @@ -0,0 +1,216 @@ +import { + PaperGrainGradient, + PaperMeshGradient, + getSeededPaperGrainGradientConfig, + getSeededPaperMeshGradientConfig, +} from "@openwork/ui/react" +import { useMemo, useState } from "react" + +const sampleIds = [ + "om_01kmhbscaze02vp04ykqa4tcsb", + "om_01kmhbscazf4cjf1bssx6v9q9", + "ow_01kmj2wc68r1zk4n8v7j6v1n2k", +] + +export function App() { + const [seed, setSeed] = useState(sampleIds[0]) + const normalizedSeed = seed.trim() || sampleIds[0] + const parsedSeed = parseTypeId(normalizedSeed) + const meshConfig = useMemo(() => getSeededPaperMeshGradientConfig(normalizedSeed), [normalizedSeed]) + const grainConfig = useMemo(() => getSeededPaperGrainGradientConfig(normalizedSeed), [normalizedSeed]) + + return ( +
+
+
+ +
+
+ OpenWork UI demo +

Seeded Paper gradients on their own dev surface

+

+ Type a TypeID-like string, inspect the deterministic values derived from it, and preview + the gradients that `@openwork/ui/react` will render anywhere else in the repo. +

+
+ +
+ Deterministic + Same seed, same result. +

Useful for stable identity-driven art direction across apps.

+
+
+ +
+
+ + setSeed(event.target.value)} + spellCheck={false} + /> + +
+ {sampleIds.map((sampleId) => ( + + ))} +
+
+ +
+ + + + +
+
+ +
+ } + /> + + } + /> +
+ +
+
+ Determinism check +
+ + + + + + +
+

+ These two cards use the same seed and should always match. +

+
+ +
+ Import paths +
+ @openwork/ui/react + @openwork/ui/solid +
+
{`import { PaperMeshGradient, PaperGrainGradient } from "@openwork/ui/react"
+
+
+`}
+
+
+
+ ) +} + +function GradientCard({ + title, + subtitle, + colors, + config, + surface, +}: { + title: string + subtitle: string + colors: string[] + config: Record + surface: React.ReactNode +}) { + return ( +
+
+ {surface} +
+
+ @openwork/ui/react +

{title}

+

{subtitle}

+
+
+ +
+
+ Colors +
+ {colors.map((color) => ( +
+ + {color} +
+ ))} +
+
+ +
+ Calculated values +
{JSON.stringify(config, null, 2)}
+
+
+
+ ) +} + +function MiniPreview({ title, children }: { title: string; children: React.ReactNode }) { + return ( +
+ {title} +
{children}
+
+ ) +} + +function SeedMeta({ label, value }: { label: string; value: string }) { + return ( +
+ {label} + {value} +
+ ) +} + +function parseTypeId(value: string) { + const separatorIndex = value.indexOf("_") + + if (separatorIndex === -1) { + return { + prefix: null, + suffix: value, + suffixAnchor: value.slice(0, 5) || null, + suffixTail: value.slice(5) || null, + } + } + + const prefix = value.slice(0, separatorIndex) || null + const suffix = value.slice(separatorIndex + 1) || null + + return { + prefix, + suffix, + suffixAnchor: suffix?.slice(0, 5) || null, + suffixTail: suffix?.slice(5) || null, + } +} diff --git a/apps/ui-demo/src/main.tsx b/apps/ui-demo/src/main.tsx new file mode 100644 index 0000000000..8624da889e --- /dev/null +++ b/apps/ui-demo/src/main.tsx @@ -0,0 +1,10 @@ +import React from "react" +import ReactDOM from "react-dom/client" +import { App } from "./app" +import "./styles.css" + +ReactDOM.createRoot(document.getElementById("root")!).render( + + + , +) diff --git a/apps/ui-demo/src/styles.css b/apps/ui-demo/src/styles.css new file mode 100644 index 0000000000..69007a8995 --- /dev/null +++ b/apps/ui-demo/src/styles.css @@ -0,0 +1,343 @@ +:root { + color-scheme: light; + font-family: "IBM Plex Sans", "Inter", system-ui, sans-serif; + background: + radial-gradient(circle at top left, rgba(57, 181, 74, 0.16), transparent 28%), + radial-gradient(circle at top right, rgba(39, 98, 255, 0.14), transparent 30%), + linear-gradient(180deg, #f6f1e8 0%, #efe7d7 100%); + color: #1f2c2b; +} + +* { + box-sizing: border-box; +} + +body { + margin: 0; + min-width: 320px; +} + +button, +input, +textarea, +select { + font: inherit; +} + +code, +pre, +.sample-chip, +.seed-input { + font-family: "IBM Plex Mono", "SFMono-Regular", ui-monospace, monospace; +} + +.app-shell { + position: relative; + min-height: 100vh; + padding: 32px; + overflow: hidden; +} + +.ambient { + position: fixed; + pointer-events: none; + border-radius: 999px; + filter: blur(70px); + opacity: 0.45; +} + +.ambient-a { + top: -120px; + left: -80px; + width: 320px; + height: 320px; + background: rgba(76, 
175, 80, 0.22); +} + +.ambient-b { + right: -80px; + bottom: 80px; + width: 280px; + height: 280px; + background: rgba(59, 130, 246, 0.2); +} + +.panel { + position: relative; + border: 1px solid rgba(24, 30, 28, 0.08); + background: rgba(255, 251, 245, 0.82); + backdrop-filter: blur(18px); + border-radius: 28px; + box-shadow: 0 24px 80px -48px rgba(29, 24, 17, 0.45); +} + +.hero-card { + display: grid; + gap: 24px; + grid-template-columns: minmax(0, 1.5fr) minmax(260px, 0.8fr); + padding: 32px; +} + +.hero-copy h1 { + margin: 10px 0 0; + font-size: clamp(2.4rem, 6vw, 4.5rem); + line-height: 0.92; + letter-spacing: -0.08em; +} + +.hero-copy p, +.rule-card p, +.support-copy, +.surface-copy p { + margin: 0; + color: #516160; + line-height: 1.7; +} + +.hero-copy p { + margin-top: 18px; + max-width: 62ch; +} + +.rule-card { + align-self: end; + padding: 20px; + border-radius: 24px; + background: #16201f; + color: #f0f7f3; +} + +.rule-card p { + margin-top: 8px; + color: rgba(240, 247, 243, 0.74); +} + +.eyebrow { + display: inline-block; + font-size: 0.72rem; + font-weight: 700; + letter-spacing: 0.18em; + text-transform: uppercase; +} + +.muted { + color: #6d7a79; +} + +.on-dark { + color: rgba(255, 255, 255, 0.68); +} + +.controls-grid, +.footer-grid { + display: grid; + gap: 24px; + grid-template-columns: minmax(0, 1.25fr) minmax(0, 0.95fr); + margin-top: 24px; +} + +.input-panel, +.seed-meta-grid, +.code-panel { + padding: 24px; +} + +.seed-input { + width: 100%; + margin-top: 10px; + padding: 15px 16px; + border-radius: 20px; + border: 1px solid rgba(22, 32, 31, 0.12); + background: rgba(255, 255, 255, 0.92); + color: #182321; + font-size: 0.92rem; + outline: none; + transition: border-color 120ms ease, box-shadow 120ms ease; +} + +.seed-input:focus { + border-color: #287d75; + box-shadow: 0 0 0 5px rgba(40, 125, 117, 0.12); +} + +.sample-list { + display: flex; + flex-wrap: wrap; + gap: 10px; + margin-top: 16px; +} + +.sample-chip { + border: 1px solid rgba(22, 
32, 31, 0.1); + background: rgba(255, 255, 255, 0.75); + color: #425251; + border-radius: 999px; + padding: 9px 14px; + cursor: pointer; +} + +.sample-chip.active { + background: #1f6978; + color: white; + border-color: #1f6978; +} + +.seed-meta-grid { + display: grid; + gap: 12px; + grid-template-columns: repeat(2, minmax(0, 1fr)); + background: #16201f; +} + +.seed-meta-card { + padding: 18px; + border-radius: 20px; + border: 1px solid rgba(255, 255, 255, 0.08); + background: rgba(255, 255, 255, 0.04); +} + +.seed-meta-card code { + display: block; + margin-top: 8px; + color: #f4fbf7; + word-break: break-all; + line-height: 1.6; +} + +.preview-grid { + display: grid; + gap: 24px; + grid-template-columns: repeat(2, minmax(0, 1fr)); + margin-top: 24px; +} + +.preview-card { + overflow: hidden; +} + +.gradient-surface { + position: relative; + min-height: 340px; + background: #101818; +} + +.gradient-fill { + position: absolute; + inset: 0; +} + +.surface-overlay { + position: absolute; + inset: 0; + background: linear-gradient(180deg, rgba(10, 18, 18, 0.08), rgba(10, 18, 18, 0.34)); +} + +.surface-copy { + position: absolute; + inset-inline: 0; + bottom: 0; + padding: 24px; + color: white; +} + +.surface-copy h2 { + margin: 10px 0 0; + font-size: 2rem; + letter-spacing: -0.05em; +} + +.surface-copy p { + margin-top: 10px; + color: rgba(255, 255, 255, 0.78); +} + +.details-stack { + display: grid; + gap: 20px; + padding: 24px; +} + +.swatch-list, +.pill-stack { + display: flex; + flex-wrap: wrap; + gap: 10px; + margin-top: 14px; +} + +.swatch-pill, +.import-pill { + display: inline-flex; + align-items: center; + gap: 10px; + border-radius: 999px; + border: 1px solid rgba(22, 32, 31, 0.08); + background: rgba(248, 243, 235, 0.9); + padding: 10px 14px; +} + +.swatch-dot { + width: 14px; + height: 14px; + border-radius: 999px; + border: 1px solid rgba(0, 0, 0, 0.12); +} + +pre { + overflow-x: auto; + margin: 14px 0 0; + border-radius: 22px; + background: #16201f; + 
color: #dcebe4; + padding: 18px; + font-size: 0.8rem; + line-height: 1.7; +} + +.mini-grid { + display: grid; + gap: 16px; + grid-template-columns: repeat(2, minmax(0, 1fr)); + margin-top: 16px; +} + +.mini-surface { + position: relative; + min-height: 180px; + margin-top: 8px; + overflow: hidden; + border-radius: 22px; + background: #111918; +} + +.support-copy { + margin-top: 16px; +} + +@media (max-width: 980px) { + .hero-card, + .controls-grid, + .preview-grid, + .footer-grid { + grid-template-columns: 1fr; + } +} + +@media (max-width: 640px) { + .app-shell { + padding: 18px; + } + + .hero-card, + .input-panel, + .seed-meta-grid, + .code-panel, + .details-stack { + padding: 18px; + } + + .seed-meta-grid, + .mini-grid { + grid-template-columns: 1fr; + } +} diff --git a/tsconfig.json b/apps/ui-demo/tsconfig.json similarity index 87% rename from tsconfig.json rename to apps/ui-demo/tsconfig.json index 425ff26988..2957d0dfdf 100644 --- a/tsconfig.json +++ b/apps/ui-demo/tsconfig.json @@ -4,8 +4,7 @@ "useDefineForClassFields": true, "module": "ESNext", "lib": ["ES2022", "DOM", "DOM.Iterable"], - "jsx": "preserve", - "jsxImportSource": "solid-js", + "jsx": "react-jsx", "moduleResolution": "Bundler", "resolveJsonModule": true, "isolatedModules": true, diff --git a/apps/ui-demo/vite.config.ts b/apps/ui-demo/vite.config.ts new file mode 100644 index 0000000000..4f3f02a3ee --- /dev/null +++ b/apps/ui-demo/vite.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from "vite" +import react from "@vitejs/plugin-react" + +export default defineConfig({ + plugins: [react()], + server: { + host: "0.0.0.0", + port: 3333, + strictPort: true, + }, + preview: { + host: "0.0.0.0", + port: 3333, + strictPort: true, + }, + build: { + target: "es2022", + }, +}) diff --git a/changelog/release-tracker-2026-02-19.md b/changelog/release-tracker-2026-02-19.md new file mode 100644 index 0000000000..05c4e50d22 --- /dev/null +++ b/changelog/release-tracker-2026-02-19.md @@ -0,0 +1,1322 @@ +# 
Release Changelog Tracker + +Internal preparation file for release summaries. This is not yet published to the changelog page or docs. + +## v0.11.100 + +#### Commit +`a4601059` + +#### Released at +`2026-02-19T17:49:05Z` + +#### Title +Composer drafts stop disappearing mid-prompt + +#### One-line summary +Fixes a session composer race so long prompts stay intact instead of getting replaced by stale draft echoes. + +#### Main changes +- Fixed stale draft echoes overriding what you were actively typing in the session composer. +- Tightened draft state tracking so long prompts stay stable during extended writing. + +#### Lines of code changed since previous release +98 lines changed since `v0.11.99` (58 insertions, 40 deletions). + +#### Release importance +Minor release: restores composer draft stability so long prompts no longer disappear while typing. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed a session composer bug where long prompts could appear to clear or get replaced while you were typing. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.101 + +#### Commit +`87fda845` + +#### Released at +`2026-02-19T21:26:55Z` + +#### Title +Local migration repair and clearer Soul controls + +#### One-line summary +Adds a desktop recovery path for broken local OpenCode migrations and makes Soul setup plus compact action buttons easier to steer. + +#### Main changes +Added migration repair from onboarding and Settings so broken local startup can recover without leaving OpenWork. + +Also released: + +- Clearer Soul starter steering and observability. +- Cleaner compact action buttons across settings and sidebars. 
+ +#### Lines of code changed since previous release +1248 lines changed since `v0.11.100` (933 insertions, 315 deletions). + +#### Release importance +Minor release: improves local recovery, Soul steering, and interface clarity without changing the product's overall shape. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added clearer Soul starter observability and steering controls in the app. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Added a migration recovery flow so broken local OpenCode database state can be repaired from the app experience. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.102 + +#### Commit +`f728cc3b` + +#### Released at +`2026-02-20T00:00:11Z` + +#### Title +Migration recovery explains when it can help + +#### One-line summary +Clarifies when repair is actually available in the app and resets the landing page back to broader OpenWork messaging. + +#### Main changes +- Added disabled-state feedback and clearer reasons when migration recovery is unavailable in onboarding and Settings. +- Reverted the homepage copy from a workers-heavy framing back to the broader OpenWork message. + +#### Lines of code changed since previous release +168 lines changed since `v0.11.101` (100 insertions, 68 deletions). + +#### Release importance +Minor release: improves recovery-flow clarity with a focused troubleshooting UX patch. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Users can now see more clearly when migration recovery is available instead of guessing whether the repair flow should work. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.103 + +#### Commit +`a1b7a5e1` + +#### Released at +`2026-02-20T00:41:17Z` + +#### Title +Soul setup now runs safely, and sidebar sessions stay scoped + +#### One-line summary +Prevents Soul quickstart content from being injected as raw prompt text and keeps sidebar session state tied to the active workspace root. + +#### Main changes +- Switched Soul enable flows to run through the slash-command path instead of sending template text directly. +- Scoped sidebar session sync by workspace root so session state does not bleed across workspaces. + +#### Lines of code changed since previous release +83 lines changed since `v0.11.102` (47 insertions, 36 deletions). + +#### Release importance +Major release: patches a meaningful Soul template security issue while also improving core multi-workspace behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Blocked Soul template prompt-injection behavior in app surfaces that expose Soul flows. +- Fixed sidebar sync so state no longer bleeds across different workspace roots as easily. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.104 + +#### Commit +`091f13d2` + +#### Released at +`2026-02-20T04:45:27Z` + +#### Title +Session follow mode is now in your hands + +#### One-line summary +Adds explicit follow-latest and jump-to-latest controls so streaming output stops interrupting people who scroll back to read. + +#### Main changes +- Added a follow-latest toggle and a jump-to-latest button in the session timeline. +- Turned off auto-follow as soon as you scroll away from the live tail. 
+ +#### Lines of code changed since previous release +211 lines changed since `v0.11.103` (123 insertions, 88 deletions). + +#### Release importance +Minor release: fixes an annoying session reading behavior without materially changing the surrounding workflow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed session follow-scroll so it respects user scrolling instead of repeatedly pulling the view back to the live tail. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.105 + +#### Commit +`45f5f07d` + +#### Released at +`2026-02-20T05:12:11Z` + +#### Title +Session timelines stop auto-following altogether + +#### One-line summary +Removes the remaining automatic follow behavior so live output no longer drags the view while you read older messages. + +#### Main changes +- Removed automatic session follow scrolling during new output and sends. +- Left only a manual jump-to-latest affordance when you are away from the bottom. + +#### Lines of code changed since previous release +129 lines changed since `v0.11.104` (25 insertions, 104 deletions). + +#### Release importance +Minor release: removes a disruptive session auto-scroll behavior with a tightly scoped UI fix. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Removed the automatic session scroll-follow behavior that was still causing unwanted movement while users reviewed prior output. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.106 + +#### Commit +`4e9260b9` + +#### Released at +`2026-02-20T05:19:07Z` + +#### Title +Packaging-only release lockfile maintenance + +#### One-line summary +Refreshes package lock metadata for the release line, with no clear app, desktop, web, or workflow change. + +#### Main changes +Updated release package metadata only, with no notable user-facing or developer-facing workflow change. + +#### Lines of code changed since previous release +26 lines changed since `v0.11.105` (13 insertions, 13 deletions). + +#### Release importance +Minor release: refreshes release metadata only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.107 + +#### Commit +`76a307fc` + +#### Released at +`2026-02-20T05:40:27Z` + +#### Title +Reopening sessions no longer snaps you back to the top + +#### One-line summary +Fixes a revisit-specific session bug so returning to an existing conversation preserves a steadier reading position. + +#### Main changes +- Stopped reopened sessions from reinitializing scroll position back to the top. +- Limited top-of-thread initialization to the first visit for each session. + +#### Lines of code changed since previous release +43 lines changed since `v0.11.106` (29 insertions, 14 deletions). + +#### Release importance +Minor release: fixes another focused session scrolling regression without changing the overall product experience. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed repeated session resets to the top of the timeline. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.108 + +#### Commit +`3ae49df6` + +#### Released at +`2026-02-20T18:14:52Z` + +#### Title +Readable share pages, sturdier Soul flows, safer drafts + +#### One-line summary +Makes shared bundles inspectable in the browser, preserves unsent draft text across tab switches, and strengthens Soul activation and audit flows. + +#### Main changes +- Added human-readable bundle pages with raw JSON and download fallback for share links. +- Preserved composer drafts across tab switches. +- Hardened Soul setup and added clearer activation audit and steering flows. + +#### Lines of code changed since previous release +1160 lines changed since `v0.11.107` (966 insertions, 194 deletions). + +#### Release importance +Minor release: adds a meaningful sharing improvement and reliability fixes without materially reshaping how OpenWork works overall. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added browser-friendly share bundle pages with automatic JSON fallback. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Hardened Soul enable and steering audit flows so they fail less often in user-visible app paths. +- Preserved composer drafts when switching tabs. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.109 + +#### Commit +`a896defd` + +#### Released at +`2026-02-20T20:51:01Z` + +#### Title +Safer automation setup, grouped skills, and global MCP config + +#### One-line summary +Keeps automations hidden until the scheduler is installed and lets OpenWork pick up domain-organized skills plus machine-level MCP servers. + +#### Main changes +- Hid Automations until the scheduler is installed. +- Added support for skills stored in domain folders. +- Included global MCP servers alongside workspace config. + +#### Lines of code changed since previous release +410 lines changed since `v0.11.108` (321 insertions, 89 deletions). + +#### Release importance +Minor release: improves setup predictability and expands advanced configuration support without changing the core product model. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added support for domain-grouped skill folders. +- Added support for global MCP configuration alongside project-local config. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Prevented automations from appearing as available before the scheduler dependency is installed. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.110 + +#### Commit +`8f869772` + +#### Released at +`2026-02-20T22:35:16Z` + +#### Title +Release packaging and deploy hardening only + +#### One-line summary +Hardens updater metadata generation and share-service deploy behavior, with no visible OpenWork app or workflow changes. + +#### Main changes +Mostly packaging only: the release now publishes deterministic updater metadata and skips unnecessary desktop builds during share-service deploys. + +#### Lines of code changed since previous release +294 lines changed since `v0.11.109` (269 insertions, 25 deletions). 
+ +#### Release importance +Minor release: hardens release and deploy infrastructure without introducing intended user-facing product changes. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.111 + +#### Commit +`12847be3` + +#### Released at +`2026-02-20T23:04:52Z` + +#### Title +Version metadata sync only + +#### One-line summary +Republishes synchronized package and desktop version metadata, with no intended OpenWork app, server, or workflow changes. + +#### Main changes +Packaging only: this release just keeps version numbers and shipped metadata aligned across app, desktop, server, router, and orchestrator packages. + +#### Lines of code changed since previous release +26 lines changed since `v0.11.110` (13 insertions, 13 deletions). + +#### Release importance +Minor release: keeps release metadata aligned only, with no intended user-facing change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.112 + +#### Commit +`a0ceeae0` + +#### Released at +`2026-02-21T01:19:34Z` + +#### Title +Cleaner session tool timelines + +#### One-line summary +Hides step lifecycle noise and separates reasoning from tool runs so active sessions are easier to scan. + +#### Main changes +- Removed step start and finish rows from the session timeline. +- Split grouped step blocks at reasoning boundaries so tool runs read in a more natural sequence. 
+ +#### Lines of code changed since previous release +233 lines changed since `v0.11.111` (178 insertions, 55 deletions). + +#### Release importance +Minor release: improves session readability with a focused UI cleanup while the rest of the patch stays behind the scenes. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Removed noisy lifecycle rows from the session tool timeline so users can scan meaningful progress more easily. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.113 + +#### Commit +`83af293a` + +#### Released at +`2026-02-21T01:58:50Z` + +#### Title +Cmd+K quick actions for session work + +#### One-line summary +Adds a keyboard-first palette for jumping between sessions and changing model or thinking settings without leaving chat. + +#### Main changes +- Open quick actions with Cmd+K from the session view. +- Search and jump across sessions from the same palette. +- Change the active model or thinking level in place during a live session. + +#### Lines of code changed since previous release +558 lines changed since `v0.11.112` (534 insertions, 24 deletions). + +#### Release importance +Minor release: adds a focused productivity feature that makes everyday session navigation and configuration faster. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added a keyboard-first quick-actions palette for session navigation plus model and thinking controls. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.114 + +#### Commit +`28596bf7` + +#### Released at +`2026-02-22T06:00:46Z` + +#### Title +OpenWork Cloud worker setup and reconnect flows + +#### One-line summary +Adds a guided web flow for launching cloud workers, then makes reconnects work with saved workers, usable tokens, and workspace-scoped connect links. + +#### Main changes +Adds the first full OpenWork Cloud worker setup flow in the web app. + +Also released: + +- A 3-step sign-in, checkout, and launch flow. +- Saved workers plus reusable tokens and workspace-scoped connect URLs. +- Background provisioning with polling and completed provider OAuth in the app. + +#### Lines of code changed since previous release +6726 lines changed since `v0.11.113` (6593 insertions, 133 deletions). + +#### Release importance +Major release: introduces OpenWork Cloud worker provisioning and connect flows that materially change how users can start and use remote workers. + +#### Major improvements +True + +#### Number of major improvements +4 + +#### Major improvement details +- Added the Den control plane with real Render-backed cloud workers inside OpenWork. +- Shipped a new 3-step cloud worker setup experience in the web app. +- Persisted user workers and removed manual worker ID recovery from the hosted flow. +- Gated cloud workers behind Polar entitlements with a default hosted worker plan. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +5 + +#### Major bug fix details +- Completed the provider OAuth connect flow inside the app modal. +- Returned compatible worker tokens for remote connect. +- Returned workspace-scoped connect URLs so cloud workers open with the right workspace context. +- Switched worker launch to asynchronous provisioning with auto-polling for better setup reliability. +- Fixed editor-mode file opening and removed reasoning text noise from the session timeline. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.115 + +#### Commit +`74048ebb` + +#### Released at +`2026-02-22T07:45:08Z` + +#### Title +Private Telegram bot pairing and sturdier hosted sign-in + +#### One-line summary +Requires explicit pairing before a private Telegram chat can control a worker and lets hosted auth recover from broken upstream HTML responses. + +#### Main changes +Private Telegram bots now stay closed until a chat is explicitly paired, and hosted sign-in fails over more cleanly when the auth proxy gets bad 5xx HTML. +Pairing is approved by sending a command like `/pair ABCD-1234` in the chat. + +#### Lines of code changed since previous release +790 lines changed since `v0.11.114` (700 insertions, 90 deletions). + +#### Release importance +Minor release: tightens messaging security and fixes a focused hosted auth reliability issue without changing the broader product shape. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added a private Telegram bot pairing workflow that requires explicit approval before a chat can link to a workspace. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Added auth-proxy failover for 5xx HTML responses so hosted sign-in flows recover more gracefully. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.116 + +#### Commit +`a7b88238` + +#### Released at +`2026-02-22T18:26:36Z` + +#### Title +Cleaner cloud-worker connect with desktop deep links + +#### One-line summary +Turns the hosted worker page into a simpler list-detail picker and adds one-click desktop handoff into OpenWork's remote connect flow. + +#### Main changes +- Reworked hosted workers into a clearer list-detail connect view with status and action panels. 
+- Added `openwork://connect-remote` deep links so the desktop app can open remote-connect details directly. +- Kept manual URL and token copy available when one-click open is unavailable. + +#### Lines of code changed since previous release +870 lines changed since `v0.11.115` (664 insertions, 206 deletions). + +#### Release importance +Minor release: improves a focused cloud-worker flow by making remote connection clearer across web and desktop. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a list-detail cloud worker connect experience in the web app. +- Wired desktop deep links so hosted remote-connect actions can open directly in the app. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.117 + +#### Commit +`adeafe5a` + +#### Released at +`2026-02-23T01:09:20Z` + +#### Title +Hosted worker connect and cleanup flows get clearer + +#### One-line summary +Reworks the web worker shell with clearer status, delete, and custom-domain handling, then makes session runs easier to scan by separating request, work, and result. + +#### Main changes +- Hosted workers now use a full-page list-detail flow with progressive disclosure instead of exposing every manual control up front. +- You can delete a worker from the web flow, and custom domains resolve more cleanly when available. +- Session timelines split each turn into request, execution, and result blocks for tool-heavy chats. + +#### Lines of code changed since previous release +2207 lines changed since `v0.11.116` (1719 insertions, 488 deletions). + +#### Release importance +Minor release: meaningfully improves hosted worker usability and session readability while staying within the existing product model. 
+ +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Added worker delete support in the hosted cloud flow. +- Added custom worker domain support for hosted workers. +- Introduced explicit session turn segmentation into request, execution, and result. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +4 + +#### Major bug fix details +- Hardened Den against transient MySQL disconnect and reset conditions. +- Recovered messaging from empty router prompt replies. +- Stopped inbox refresh churn caused by auth memo changes. +- Softened hosted 502 failures and restored the worker detail pane layout. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.118 + +#### Commit +`108d4efe` + +#### Released at +`2026-02-23T02:49:35Z` + +#### Title +Large sessions type faster, and cloud worker setup stays simpler + +#### One-line summary +Cuts composer lag in big chats, renames timeline labels to plainer language, and keeps manual worker controls tucked behind advanced options. + +#### Main changes +- Typing stays responsive deeper into large conversations by cutting composer layout churn. +- Session timelines swap technical segment names for clearer user-facing wording. +- Cloud worker pages hide manual URLs and tokens by default and recover safely when delete or custom-domain responses are incomplete. + +#### Lines of code changed since previous release +758 lines changed since `v0.11.117` (555 insertions, 203 deletions). + +#### Release importance +Minor release: improves responsiveness and clarity in existing session and hosted-worker flows without changing core behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Reduced typing lag in long sessions by cutting composer layout churn. +- Updated session labels to use clearer, user-facing wording. +- Fixed hosted worker delete responses and added a safer fallback path for vanity domains. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.119 + +#### Commit +`67844b38` + +#### Released at +`2026-02-23T05:13:07Z` + +#### Title +Long chats stay snappier, and web onboarding looks cleaner + +#### One-line summary +Further trims typing lag while tightening the landing hero, Get started path, and hosted worker layout across the web experience. + +#### Main changes +Further reduces composer reflow in heavy sessions, points Den visitors to a clearer Get started path, and makes both the landing hero and hosted worker panels use space more cleanly. + +#### Lines of code changed since previous release +308 lines changed since `v0.11.118` (197 insertions, 111 deletions). + +#### Release importance +Minor release: focuses on performance polish and presentation improvements across existing session and onboarding surfaces. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Reduced long-session composer reflow work to improve typing responsiveness in heavy chats. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.120 + +#### Commit +`6cf077b3` + +#### Released at +`2026-02-23T06:19:35Z` + +#### Title +Worker switching keeps session lists stable + +#### One-line summary +Preserves sidebar sessions while moving between workers and refreshes the landing hero with higher contrast, calmer motion, and lighter nav chrome. 
+ +#### Main changes +- Switching workers no longer makes sidebar sessions disappear while connection state catches up. +- The landing hero gets a stronger shader, higher text contrast, slower animation, and simpler sticky navigation. + +#### Lines of code changed since previous release +150 lines changed since `v0.11.119` (94 insertions, 56 deletions). + +#### Release importance +Minor release: fixes a core navigation annoyance and adds focused landing-page polish. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed sidebar behavior so sessions remain visible while users switch across workers. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.121 + +#### Commit +`b5f7814f` + +#### Released at +`2026-02-23T06:46:26Z` + +#### Title +Session timelines read naturally, and search hits stand out + +#### One-line summary +Removes meta-heavy timeline labels, highlights search hits inside messages, and makes quick actions and composing feel faster in active chats. + +#### Main changes +- Session runs now read like a conversation instead of showing Plan, Activity, and Answer labels. +- Search highlights matching text inside messages, not just matching rows. +- Worker quick actions and composer updates feel more responsive during active chats. + +#### Lines of code changed since previous release +485 lines changed since `v0.11.120` (311 insertions, 174 deletions). + +#### Release importance +Minor release: improves the feel and readability of the core session experience without changing the broader workflow model. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added in-message search match highlighting while improving worker quick actions and composer responsiveness. 
+ +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.122 + +#### Commit +`dfa41808` + +#### Released at +`2026-02-26T01:34:07Z` + +#### Title +Hosted onboarding and share links become app handoffs + +#### One-line summary +Adds GitHub sign-in, Open in App worker handoff, and share links for workspace profiles and skill sets while smoothing long-session and desktop reliability. + +#### Main changes +Hosted OpenWork now hands people straight from the web into the app for connect and import flows. + +Also released: + +- GitHub sign-in plus a dedicated download page for faster first-time setup. +- Share links for workspace profiles and skill sets, with deep links that default to a new worker. +- Long sessions render more smoothly, local file links resolve correctly, and desktop shutdown is cleaner. + +#### Lines of code changed since previous release +5651 lines changed since `v0.11.121` (4835 insertions, 816 deletions). + +#### Release importance +Major release: substantially expands how users sign up, connect, share, and navigate OpenWork across hosted and desktop flows. + +#### Major improvements +True + +#### Number of major improvements +5 + +#### Major improvement details +- Added Open in App handoff for hosted remote-connect flows. +- Simplified get-started signup and added GitHub sign-in. +- Added a dedicated download page with platform anchors and a stronger docs entrypoint. +- Added workspace profile and skills-set sharing flows. +- Added bundle-share deep links that open directly into new-worker imports. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +5 + +#### Major bug fix details +- Grouped exploration steps and cached markdown rendering to keep long sessions responsive. 
+- Fixed workspace-relative markdown file references so local file links open correctly. +- Stabilized workspace actions, improved share modal mobile readability, wrapped long connection URLs, and clamped long skill triggers. +- Hardened hosted auth with cookie preservation, trusted-origin defaults, callback fixes, and Polar access backfill. +- Retried transient Den signup database reads and stopped the desktop orchestrator daemon cleanly on app close. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.123 + +#### Commit +`dfd331da` + +#### Released at +`2026-02-26T05:45:34Z` + +#### Title +Clearer share links and local server recovery in Settings + +#### One-line summary +Refreshes both the in-app share modal and public bundle pages, and adds a one-click local server restart when a local worker gets stuck. + +#### Main changes +- Share Workspace becomes a cleaner split between live access details and public link publishing. +- Public bundle pages now look and read like OpenWork, with clearer import flows outside the app. +- Settings adds `Restart local server` so local recovery no longer means leaving OpenWork. + +#### Lines of code changed since previous release +1480 lines changed since `v0.11.122` (1027 insertions, 453 deletions). + +#### Release importance +Minor release: introduces two focused user-facing improvements that make sharing and local recovery noticeably better. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a local server restart action in Settings. +- Redesigned the share modal and generated bundle page styling to match OpenWork’s product identity. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.124 + +#### Commit +`3237bfab` + +#### Released at +`2026-02-26T19:33:56Z` + +#### Title +Orbita gives sessions a clearer three-pane workspace + +#### One-line summary +Reworks the main session screen with a stronger left rail, cleaner timeline canvas, and floating composer while preserving readability across themes. + +#### Main changes +Applies the Orbita direction across the session view, with a reorganized left rail for workers and sessions, lighter inbox and artifact side panels, and a clearer floating composer and message canvas. + +#### Lines of code changed since previous release +734 lines changed since `v0.11.123` (451 insertions, 283 deletions). + +#### Release importance +Minor release: refreshes the core session experience with a substantial layout polish pass while keeping the same underlying workflow. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Applied the Orbita session layout direction across the main session interface. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed theme and contrast regressions during the layout refresh so session surfaces remain readable. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. diff --git a/changelog/release-tracker-2026-02-26.md b/changelog/release-tracker-2026-02-26.md new file mode 100644 index 0000000000..9e99712e2b --- /dev/null +++ b/changelog/release-tracker-2026-02-26.md @@ -0,0 +1,1356 @@ +# Release Changelog Tracker + +Internal preparation file for release summaries. This is not yet published to the changelog page or docs. 
+ +## v0.11.125 + +#### Commit +`7225736f` + +#### Released at +`2026-02-26T22:26:17Z` + +#### Title +Unified workspace navigation and smoother downloads + +#### One-line summary +Unifies workspace navigation across dashboard and session views while preventing download-heavy operations from freezing the app. + +#### Main changes +- Reused the same workspace and session sidebar in dashboard and session views. +- Deduplicated equivalent remote workers and kept rows actionable during stale connects. +- Throttled download updates so large transfers stop freezing the desktop UI. + +#### Lines of code changed since previous release +710 lines changed since `v0.11.124` (160 insertions, 550 deletions). + +#### Release importance +Minor release: fixes two painful interaction problems in core navigation and system responsiveness without introducing a new workflow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Unified sidebar and workspace switching behavior so navigation stays consistent and actionable. +- Added download throttling to prevent UI freezes during large transfers. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.126 + +#### Commit +`42f68d9b` + +#### Released at +`2026-02-27T15:47:46Z` + +#### Title +Simpler artifacts and direct workspace actions + +#### One-line summary +Simplifies artifact handling and adds direct worker and plugin actions so common cleanup and file workflows take fewer steps. + +#### Main changes +- Replaced the inline artifact markdown editor with simpler reveal and open actions. +- Added Open in Obsidian for markdown and Reveal in Finder or Explorer for local workers. +- Added direct plugin removal and worker reveal actions from the main UI. 
+ +#### Lines of code changed since previous release +885 lines changed since `v0.11.125` (360 insertions, 525 deletions). + +#### Release importance +Minor release: simplifies artifact management and adds faster workspace controls without changing OpenWork's overall workflow model. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added direct worker and plugin quick actions so common workspace management tasks can be done from the main app surfaces. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Replaced the in-app artifact markdown editor with a simpler read-only artifact action flow. + +## v0.11.127 + +#### Commit +`7f3f70b0` + +#### Released at +`2026-02-28T02:48:07Z` + +#### Title +Get back online recovery and smarter Docker dev-up + +#### One-line summary +Makes worker recovery clearer and preserves existing access, while smoothing Docker dev stacks for developers using local OpenCode config. + +#### Main changes +- Added a plain-language Get back online action for remote worker recovery. +- Reused existing OpenWork tokens during sandbox restarts so reconnects keep working. +- Updated the legacy Docker dev stack to mount host OpenCode config and auth. + +#### Lines of code changed since previous release +370 lines changed since `v0.11.126` (325 insertions, 45 deletions). + +#### Release importance +Minor release: improves worker recovery clarity and token stability with a focused reliability update. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added a clearer in-app `Get back online` recovery action for workers. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed worker recovery so sandbox restarts can reconnect without rotating existing OpenWork tokens. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.128 + +#### Commit +`da183cf7` + +#### Released at +`2026-03-01T18:40:52Z` + +#### Title +Remote file sessions, Obsidian sync, and long-chat readability + +#### One-line summary +Adds live remote file sessions with Obsidian-backed editing, then makes long desktop conversations easier to read and follow. + +#### Main changes +- Added short-lived file sessions with catalog, event, read, write, rename, and delete batch APIs. +- Mirrored remote markdown into Obsidian and synced edits back to the worker. +- Added desktop-wide zoom shortcuts while cleaning transcript noise and live-thinking scroll behavior. + +#### Lines of code changed since previous release +2719 lines changed since `v0.11.127` (2612 insertions, 107 deletions). + +#### Release importance +Minor release: materially expands remote file workflows and readability, but does so as focused product improvements rather than a fundamental platform shift. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Added just-in-time file sessions for remote file workflows. +- Added batch sync support for mirrored remote files. +- Added desktop-wide font zoom shortcuts and whole-webview zoom for readability. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed transcript rendering so synthetic control-only parts no longer appear in the user-facing conversation. +- Fixed live thinking updates so the transcript auto-scrolls more reliably during active runs. +- Fixed recovery and desktop startup edge cases, including stale base URL restoration and blocking recover actions. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.129 + +#### Commit +`76a8217e` + +#### Released at +`2026-03-02T02:35:51Z` + +#### Title +Self-serve billing and media-rich messaging + +#### One-line summary +Expands cloud billing into a self-serve management flow and lets Slack and Telegram carry richer OpenWork Router messages. + +#### Main changes +- Added billing plan details, invoices, and subscription actions in the cloud worker dashboard. +- Extended Slack and Telegram delivery to send richer media, not just plain text. +- Hardened billing lookups and post-checkout navigation so account state refreshes more reliably. + +#### Lines of code changed since previous release +3238 lines changed since `v0.11.128` (3061 insertions, 177 deletions). + +#### Release importance +Minor release: adds two meaningful user-facing capabilities in billing and messaging without materially changing how the core product is operated. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added billing subscription controls and invoice history in the web cloud dashboard. +- Added first-class media transport for Slack and Telegram in OpenWork Router. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Improved billing flow reliability and navigation so subscription management behaves more consistently in the web experience. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.130 + +#### Commit +`d1dee3ce` + +#### Released at +`2026-03-02T16:58:05Z` + +#### Title +Service restarts and steadier local connectivity + +#### One-line summary +Adds in-app restart controls, makes router startup recover from local port conflicts, and smooths billing returns from checkout. 
+ +#### Main changes +- Added Settings actions to restart orchestrator, OpenCode, OpenWork server, and OpenCodeRouter. +- Moved OpenCodeRouter onto conflict-free localhost health ports and retried startup failures automatically. +- Restored billing state after checkout returns and dropped Telegram self-echo loops. + +#### Lines of code changed since previous release +637 lines changed since `v0.11.129` (540 insertions, 97 deletions). + +#### Release importance +Minor release: focuses on service recovery and billing-flow reliability with targeted fixes and controls. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added in-app restart controls for local services in desktop settings. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed router startup so local connectivity is less likely to fail during desktop launch. +- Fixed billing session recovery after checkout redirects in the web cloud flow. +- Fixed Telegram router handling so bot-authored echoes no longer create noisy loops. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.131 + +#### Commit +`de9b5cc6` + +#### Released at +`2026-03-04T17:15:52Z` + +#### Title +Virtualized chats and clearer runtime status + +#### One-line summary +Keeps long sessions responsive with virtualized rendering, clearer runtime status, and optional auto-compaction after runs finish. + +#### Main changes +- Virtualized long transcripts so large chats stay responsive instead of rendering every message at once. +- Replaced split engine and server badges with one Ready indicator that opens a detailed status popover. +- Added automatic context compaction after runs, plus persistent language selection and sturdier file opening. 
+ +#### Lines of code changed since previous release +1494 lines changed since `v0.11.130` (1134 insertions, 360 deletions). + +#### Release importance +Major release: substantially improves how users run and monitor long OpenWork sessions through rendering, status, and compaction changes across core app surfaces. + +#### Major improvements +True + +#### Number of major improvements +4 + +#### Major improvement details +- Added virtualized session rendering for long chats. +- Added a unified status indicator with a detail popover. +- Added an automatic context compaction toggle. +- Added persistent language selection in settings. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed a regression where virtualized sessions could show blank transcripts. +- Fixed editor and artifact file opening so local file targets resolve more reliably. +- Fixed cross-session visibility for pending subagent prompts so important follow-up work is easier to notice. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.132 + +#### Commit +`1f641dbf` + +#### Released at +`2026-03-05T00:06:28Z` + +#### Title +Chat-first startup and faster long-session loading + +#### One-line summary +Preserves the new-session launch state, fixes first-run setup, and makes long chats load from the latest messages with less lag. + +#### Main changes +- Kept `/session` as an empty draft view instead of bouncing users into an older chat. +- Added a first-run worker empty state, created the first chat automatically, and routed non-media uploads into inbox links. +- Opened sessions at the latest messages, paged older history on demand, and collapsed oversized markdown by default. + +#### Lines of code changed since previous release +611 lines changed since `v0.11.131` (447 insertions, 164 deletions). 
+
+#### Release importance
+Minor release: tightens startup, first-run, and transcript responsiveness issues in the core session experience.
+
+#### Major improvements
+False
+
+#### Number of major improvements
+0
+
+#### Major improvement details
+None.
+
+#### Major bugs resolved
+True
+
+#### Number of major bugs resolved
+4
+
+#### Major bug fix details
+- Fixed startup so `/session` can remain in an empty draft state instead of redirecting away unexpectedly.
+- Fixed first-run chat creation so new users land in a usable conversation flow.
+- Fixed non-media upload handling so those files go to the inbox flow correctly.
+- Fixed conversation opening behavior so sessions land at the latest messages instead of an older position.
+
+#### Deprecated features
+False
+
+#### Number of deprecated features
+0
+
+#### Deprecated details
+None.
+
+## v0.11.133
+
+#### Commit
+`f11cad48`
+
+#### Released at
+`2026-03-05T15:54:31Z`
+
+#### Title
+Chat transcripts stop flickering during typing
+
+#### One-line summary
+Keeps active and long-running sessions visually stable by fixing typing flicker, remount churn, and collapsed long-message resets.
+
+#### Main changes
+- Fixed transcript flicker while typing in active chats.
+- Kept long sessions steadier by reducing remount churn in tail-loaded message lists.
+- Preserved expanded long-markdown state instead of collapsing it again mid-session.
+
+#### Lines of code changed since previous release
+292 lines changed since `v0.11.132` (163 insertions, 129 deletions).
+
+#### Release importance
+Minor release: delivers a focused session-rendering stability pass for active and long-running chats.
+
+#### Major improvements
+False
+
+#### Number of major improvements
+0
+
+#### Major improvement details
+None.
+
+#### Major bugs resolved
+True
+
+#### Number of major bugs resolved
+3
+
+#### Major bug fix details
+- Fixed transcript flicker that could appear while typing in active chats. 
+- Fixed remount churn in tail-loaded virtualized sessions.
+- Fixed long-markdown collapse state so it no longer resets unexpectedly.
+
+#### Deprecated features
+False
+
+#### Number of deprecated features
+0
+
+#### Deprecated details
+None.
+
+## v0.11.134
+
+#### Commit
+`d1658182`
+
+#### Released at
+`2026-03-06T07:28:11Z`
+
+#### Title
+Remote MCP setup gets lighter, with exportable desktop diagnostics
+
+#### One-line summary
+Makes remote-workspace MCP connection setup clearer and adds in-app debug exports, sandbox probes, and config actions for faster troubleshooting.
+
+#### Main changes
+- Remote workspaces now steer MCP setup toward URL-based connections, with optional OAuth and safer reload prompts.
+- Settings can copy or export a runtime debug report and run a sandbox probe.
+- Settings can also reveal or reset workspace config without leaving the app.
+
+#### Lines of code changed since previous release
+852 lines changed since `v0.11.133` (789 insertions, 63 deletions).
+
+#### Release importance
+Minor release: improves remote setup and troubleshooting with targeted workflow and diagnostics additions.
+
+#### Major improvements
+True
+
+#### Number of major improvements
+3
+
+#### Major improvement details
+- Simplified MCP setup for remote workspaces.
+- Added exportable debug reports and config actions in Settings.
+- Added sandbox probe diagnostics export for desktop troubleshooting.
+
+#### Major bugs resolved
+False
+
+#### Number of major bugs resolved
+0
+
+#### Major bug fix details
+None.
+
+#### Deprecated features
+False
+
+#### Number of deprecated features
+0
+
+#### Deprecated details
+None.
+
+## v0.11.135
+
+#### Commit
+`5d7185b4`
+
+#### Released at
+`2026-03-06T19:43:28Z`
+
+#### Title
+Bundled OpenCode version stays aligned across release paths
+
+#### One-line summary
+Pins the packaged OpenCode fallback consistently across CI, prerelease, and release builds, with no notable new app workflow changes. 
+ +#### Main changes +Keeps the bundled OpenCode fallback pinned to the same version across CI, prerelease, and release artifacts so packaged builds drift less, without introducing new user-facing OpenWork workflows. + +#### Lines of code changed since previous release +61 lines changed since `v0.11.134` (31 insertions, 30 deletions). + +#### Release importance +Minor release: tightens release-path consistency for bundled OpenCode behavior without adding new user-facing product workflows. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.136 + +#### Commit +`83593bdf` + +#### Released at +`2026-03-10T04:00:32Z` + +#### Title +OpenWork Share turns dropped files into worker packages + +#### One-line summary +Adds a real worker-packaging flow in OpenWork Share, rebuilds the share site on the Next.js App Router, and makes provider connections easier to manage. + +#### Main changes +- OpenWork Share now packages dropped skills, agents, commands, and MCP or OpenWork config into worker bundles. +- The share site moves to the Next.js App Router with refreshed home and bundle pages. +- Settings now lets users disconnect providers, while OAuth completion and sandbox startup recover more reliably. + +#### Lines of code changed since previous release +12837 lines changed since `v0.11.135` (9531 insertions, 3306 deletions). + +#### Release importance +Major release: substantially changes the share workflow and related web surfaces while also landing broad reliability and account-management improvements across core product areas. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Turned OpenWork Share into a worker packager. 
+- Replatformed OpenWork Share onto the Next.js App Router. +- Added provider disconnect controls in Settings. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed provider OAuth polling so connection flows complete more reliably. +- Fixed sandbox Docker preflight hangs that could block local startup. +- Fixed theme and workspace-state issues that made desktop and session behavior less predictable. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.137 + +#### Commit +`cc5700a1` + +#### Released at +`2026-03-11T06:01:10Z` + +#### Title +MCP sign-in retries and model setup get clearer + +#### One-line summary +Makes MCP OAuth flows recover more reliably and reorganizes the model picker so disconnected providers route users straight to setup. + +#### Main changes +- MCP auth now waits through reloads, reopens the browser flow clearly, and keeps retry states visible. +- The model picker separates enabled providers from setup-needed ones and links the latter to Settings. +- Remote MCP cards now expose login actions before a server is connected. + +#### Lines of code changed since previous release +734 lines changed since `v0.11.136` (562 insertions, 172 deletions). + +#### Release importance +Minor release: focuses on auth and model-selection reliability with a small follow-up packaging alignment fix. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed MCP auth connection flows so browser handoff, retry, and reconnect behavior are more reliable. +- Fixed model picker provider grouping and routing so provider setup actions are clearer and less error-prone. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.138 + +#### Commit +`5307ce16` + +#### Released at +`2026-03-11T15:19:39Z` + +#### Title +Shared bundle links now open the blueprints worker flow + +#### One-line summary +Routes shared bundle imports into the blueprints-style worker creation path so new-worker links land in the setup flow users expect. + +#### Main changes +Shared bundle deep links now open the worker-creation flow with the right blueprints preset, then continue import through the intended setup path instead of dropping users into the wrong workspace flow. + +#### Lines of code changed since previous release +143 lines changed since `v0.11.137` (101 insertions, 42 deletions). + +#### Release importance +Minor release: delivers a focused fix for shared bundle import routing without broader product-surface changes. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed shared bundle imports so they route through the blueprints flow instead of landing in the wrong setup path. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.140 + +#### Commit +`77d2f1cc` + +#### Released at +`2026-03-11T19:14:14Z` + +#### Title +Shared bundle imports land on the intended worker + +#### One-line summary +Makes shared bundle imports resolve to the exact active or newly created worker and adds more actionable sandbox startup diagnostics. + +#### Main changes +- Imports now match the active or newly created worker by workspace ID, local root, or directory hint. +- Sandbox startup logs now capture resolved Docker paths and launch arguments in the debug report. +- Failed detached-worker launches surface clearer stage and spawn diagnostics. + +#### Lines of code changed since previous release +460 lines changed since `v0.11.138` (364 insertions, 96 deletions). 
+ +#### Release importance +Minor release: fixes import targeting and worker startup clarity without materially changing OpenWork's overall product shape. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed shared skill imports so they open on the worker that was just created instead of misrouting users afterward. +- Improved sandbox startup diagnostics so failed worker launches provide clearer recovery information. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.141 + +#### Commit +`9af84bd0` + +#### Released at +`2026-03-12T01:33:57Z` + +#### Title +App and worker opens stay on the new session screen + +#### One-line summary +Keeps launch actions anchored on the new session flow while making oversized-context errors, share feedback, and support booking clearer. + +#### Main changes +- Opening the app or a new worker now stays on the new-session screen instead of jumping away unexpectedly. +- The todo strip docks to the composer, and HTTP 413 errors now suggest compaction or a fresh session. +- OpenWork Share adds inline link-success feedback, and the Book a Call form gets clearer topic cards. + +#### Lines of code changed since previous release +5453 lines changed since `v0.11.140` (3894 insertions, 1559 deletions). + +#### Release importance +Minor release: improves session flow, share feedback, and support-entry polish without introducing a major product-level shift. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Refreshed the Book a Call form with conversation topics and a more usable layout. +- Added inline success feedback and richer content handling on OpenWork Share surfaces. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +4 + +#### Major bug fix details +- Kept app and worker open actions anchored on the new session screen. +- Docked the todo strip to the composer so long session flows feel more coherent. +- Added a clearer user-facing message for HTTP 413 context-too-large failures. +- Included stage diagnostics in sandbox probe timeout errors so desktop startup failures are easier to diagnose. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.142 + +#### Commit +`f9b586ae` + +#### Released at +`2026-03-12T01:48:01Z` + +#### Title +Version alignment patch + +#### One-line summary +Republishes synchronized app, server, orchestrator, and router versions so shipped artifacts stay in lockstep, with no visible workflow changes. + +#### Main changes +Republishes synchronized desktop, server, orchestrator, and router packages so installs resolve the same version everywhere. No clear user-facing or developer-facing workflow changes land in this patch. + +#### Lines of code changed since previous release +26 lines changed since `v0.11.141` (13 insertions, 13 deletions). + +#### Release importance +Minor release: keeps release artifacts aligned for distribution without changing how users use OpenWork. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.143 + +#### Commit +`41aeb178` + +#### Released at +`2026-03-12T20:51:40Z` + +#### Title +Free first Den worker and Google signup + +#### One-line summary +Lets new users create one free Den worker without billing, sign up with Google, and enter a much clearer cloud onboarding path. 
+ +#### Main changes +Den now lets new users create one free cloud worker without billing and sign up with Google. + +Also released: + +- Retired remaining Soul-mode surfaces across the app and server. +- Showed session errors inline, removed raw markdown flashes, and refreshed share bundle pages and previews. + +#### Lines of code changed since previous release +9937 lines changed since `v0.11.142` (6244 insertions, 3693 deletions). + +#### Release importance +Major release: meaningfully changes the Den onboarding and cloud-worker experience while also retiring older Soul-mode surfaces. + +#### Major improvements +True + +#### Number of major improvements +5 + +#### Major improvement details +- Refreshed the Den landing page with a much fuller hero, comparison, support, and CTA flow. +- Allowed one free cloud worker without billing. +- Added Google authentication to Den signup. +- Added Den worker runtime upgrade messaging and controls. +- Restyled shared bundle pages and Open Graph previews for public sharing. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Showed session errors inline in chat instead of leaving failures harder to interpret. +- Prevented raw markdown from flashing while streaming responses render. + +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Removed remaining Soul mode surfaces from the app. + +## v0.11.144 + +#### Commit +`5ddc4647` + +#### Released at +`2026-03-12T22:53:50Z` + +#### Title +Workspace shell recovery and clearer docs entrypoints + +#### One-line summary +Restores reliable workspace-shell navigation and reset recovery, seeds Chrome DevTools setup correctly, and splits docs paths for technical and non-technical readers. + +#### Main changes +- Kept dashboard, session, and Settings navigation inside the workspace shell so sidebars stay reachable. 
+- Fully cleared desktop reset state on relaunch and seeded Control Chrome as `chrome-devtools` for smoother MCP setup. +- Split docs entrypoints into technical and non-technical paths so onboarding starts in the right place. + +#### Lines of code changed since previous release +1185 lines changed since `v0.11.143` (868 insertions, 317 deletions). + +#### Release importance +Minor release: focuses on reliability and navigation fixes plus targeted polish to Den and MCP setup. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Kept workspace shell navigation reachable across dashboard and session flows. +- Fully cleared desktop reset state on relaunch so recovery actually resets cleanly. +- Seeded Control Chrome as `chrome-devtools` so browser-tooling setup works more predictably. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.145 + +#### Commit +`8ceed304` + +#### Released at +`2026-03-13T05:47:09Z` + +#### Title +Den admin backoffice and support routing + +#### One-line summary +Adds a protected Den admin panel for support operations, routes enterprise contact submissions into Loops, and tightens desktop skill reload and settings diagnostics. + +#### Main changes +- Added a protected Den admin backoffice with signup, worker, and billing visibility for internal support. +- Routed enterprise contact requests into Loops and restored a mobile logout path in Den. +- Surfaced skill reload and sharing feedback more clearly and moved runtime status into Settings > Advanced. + +#### Lines of code changed since previous release +2493 lines changed since `v0.11.144` (2031 insertions, 462 deletions). 
+ +#### Release importance +Minor release: adds a focused operator capability and several UX improvements without broadly reshaping the OpenWork product. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a Den admin backoffice dashboard for internal support and worker operations. +- Wired enterprise contact submissions into Loops for follow-up handling. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Improved skill sharing and hot-reload flows in the desktop app. +- Restored a mobile logout path in Den. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.146 + +#### Commit +`8809a801` + +#### Released at +`2026-03-13T19:14:51Z` + +#### Title +Failed worker redeploy and safer skill imports + +#### One-line summary +Adds direct Den worker redeploys, makes shared skills pick a destination worker before import, and improves both local Den setup and Chrome-first guidance. + +#### Main changes +- Added a redeploy action for failed Den workers so users can recover instead of getting stuck. +- Made shared skill imports choose a destination worker before import, including new-worker and remote-worker paths. +- Added a dockerized local Den test stack and pushed browser setup toward the Chrome MCP path. + +#### Lines of code changed since previous release +3499 lines changed since `v0.11.145` (2158 insertions, 1341 deletions). + +#### Release importance +Minor release: improves recovery, import routing, and shell usability in focused ways without a major product-level change. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a failed-worker redeploy action in Den. +- Added destination-worker selection before importing shared skills. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Kept the status footer more stable when moving between settings and sessions. +- Made the browser quickstart target Chrome MCP first so setup guidance matches the expected path better. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.147 + +#### Commit +`a829371b` + +#### Released at +`2026-03-14T01:31:52Z` + +#### Title +Existing-worker imports and local Share publishing + +#### One-line summary +Lets shared skills install into existing workers, adds a local Docker-backed Share publisher for self-hosted testing, and keeps Den worker provisioning current. + +#### Main changes +Shared skills can now be imported straight into an existing worker from the app. + +Also released: + +- Added a local Docker-backed Share publisher for self-hosted dev flows. +- Bundled fresh OpenCode builds for Den workers and improved missing Chrome extension guidance. + +#### Lines of code changed since previous release +1727 lines changed since `v0.11.146` (1551 insertions, 176 deletions). + +#### Release importance +Minor release: extends sharing workflows and fixes setup friction without materially changing OpenWork's overall architecture. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Added an existing-worker import flow for shared skills. +- Added a local Docker publisher flow for OpenWork Share. +- Bundled OpenCode for Den Render workers so worker provisioning is more self-contained. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Added in-app guidance when the Chrome control extension is missing. +- Fixed long pasted skill previews so wrapping remains readable. +- Stopped pinning stale OpenCode builds in Den worker provisioning. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.148 + +#### Commit +`9a3aef42` + +#### Released at +`2026-03-14T22:28:03Z` + +#### Title +Guided Den onboarding and single-skill Share + +#### One-line summary +Turns Den signup into a calmer guided flow with clearer provisioning states while refocusing OpenWork Share on publishing one skill well. + +#### Main changes +- Rebuilt Den onboarding as a guided flow with clearer naming, intent, loading, and browser-access states. +- Simplified OpenWork Share to publish a single skill, with cleaner frontmatter and fuller shared previews. +- Added a polished feedback card and clearer import and status feedback across app surfaces. + +#### Lines of code changed since previous release +4390 lines changed since `v0.11.147` (2764 insertions, 1626 deletions). + +#### Release importance +Major release: substantially changes both Den onboarding and the OpenWork Share publishing flow in ways users will immediately notice. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Redesigned Den onboarding into a guided stepper flow. +- Simplified OpenWork Share to publish a single skill. +- Added a polished feedback entrypoint card in Settings. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Polished the shared skill import flow so import progress and outcomes are clearer. +- Slimmed session sidebar density so active chat navigation is easier to scan. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.149 + +#### Commit +`6acc6f79` + +#### Released at +`2026-03-14T23:56:20Z` + +#### Title +Richer skill previews and steadier jump-to-latest + +#### One-line summary +Makes shared skill pages easier to evaluate before import, steadies the worker-selection flow, and keeps long chats pinned to the newest reply while thinking. + +#### Main changes +- Simplified shared skill pages and added richer workspace previews before import. +- Steadied shared-skill deep-link handling and worker selection so imports fire once and land more predictably. +- Kept Jump to latest pinned during assistant thinking and reduced blank tail space in long chats. + +#### Lines of code changed since previous release +3906 lines changed since `v0.11.148` (2531 insertions, 1375 deletions). + +#### Release importance +Minor release: focuses on stabilizing sharing and long-chat behavior rather than introducing a new top-level workflow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Simplified shared skill pages so previews are easier to understand before import. +- Steadied the shared skill import flow so destination handling behaves more predictably. +- Kept Jump to latest pinning stable while long responses are still streaming. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.150 + +#### Commit +`4f89e04d` + +#### Released at +`2026-03-15T01:05:19Z` + +#### Title +Faster provider setup and steadier chat rendering + +#### One-line summary +Prioritizes common providers in new-session setup, reduces inline image churn while chatting, and routes feedback straight to the team inbox. + +#### Main changes +- Moved common providers like OpenAI and Anthropic to the front and hid the redundant ChatGPT prompt in new-session setup. 
+- Reduced inline image rerender churn so active chats feel steadier. +- Kept Settings width stable and sent feedback from Settings directly to the team inbox. + +#### Lines of code changed since previous release +342 lines changed since `v0.11.149` (241 insertions, 101 deletions). + +#### Release importance +Minor release: delivers focused session and settings polish without materially changing OpenWork's broader workflows. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +4 + +#### Major bug fix details +- Prioritized common providers in the auth flow so setup starts from the most likely choices. +- Hid a redundant ChatGPT prompt in the session flow. +- Reduced inline image churn during chat rendering. +- Kept the settings shell width stable and routed feedback to the team inbox. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. diff --git a/changelog/release-tracker-2026-03-15.md b/changelog/release-tracker-2026-03-15.md new file mode 100644 index 0000000000..a98ad5a5ab --- /dev/null +++ b/changelog/release-tracker-2026-03-15.md @@ -0,0 +1,1324 @@ +# Release Changelog Tracker + +Internal preparation file for release summaries. This is not yet published to the changelog page or docs. + +## v0.11.151 + +#### Commit +`5e606273` + +#### Released at +`2026-03-15T03:20:31Z` + +#### Title +Feedback emails reach the team inbox again + +#### One-line summary +Fixes the in-app feedback email target so reports reach the shared OpenWork inbox again. + +#### Main changes +Updates the feedback mail link to send reports to `team@openworklabs.com`, restoring the intended shared inbox for in-app feedback. + +#### Lines of code changed since previous release +81 lines changed since `v0.11.150` (55 insertions, 26 deletions). 
+ +#### Release importance +Minor release: fixes a focused feedback delivery problem without changing the surrounding product flow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed the feedback flow so submitted messages are sent to the OpenWork team inbox. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.152 + +#### Commit +`2386e59d` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-14T20:53:19-07:00`. + +#### Title +CI workflows move to Blacksmith runners + +#### One-line summary +Moves CI and release workflows onto Blacksmith-backed runners, with no visible OpenWork app, web, or server workflow change. + +#### Main changes +Repoints CI and release workflows to Blacksmith runners and larger Linux release builders, tightening the release pipeline without changing user-facing product behavior. + +#### Lines of code changed since previous release +70 lines changed since `v0.11.151` (35 insertions, 35 deletions). + +#### Release importance +Minor release: updates release infrastructure only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.153 + +#### Commit +`f35422b7` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-14T22:35:30-07:00`. 
+ +#### Title +Live session updates and scroll pinning recover + +#### One-line summary +Restores real-time assistant streaming in sessions while keeping long replies pinned correctly and web event streaming more reliable. + +#### Main changes +- Restored incremental session text updates so assistant replies stream live again. +- Reworked chat pinning and jump controls so long responses stay easier to follow. +- Let the web proxy pass event streams through more safely during fallback handling. + +#### Lines of code changed since previous release +449 lines changed since `v0.11.152` (315 insertions, 134 deletions). + +#### Release importance +Minor release: repairs a core live-session behavior without materially changing OpenWork's overall workflow model. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Restored live session updates so streaming conversations refresh in place again. +- Fixed scroll pinning so active sessions can stay attached to the newest output. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.154 + +#### Commit +`90c167f9` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-15T07:58:03-07:00`. + +#### Title +Desktop release packaging is reworked + +#### One-line summary +Reorganizes the desktop release pipeline around workflow artifacts and staged asset upload, with no direct product workflow change. + +#### Main changes +Rebuilds the desktop release pipeline to package workflow artifacts first, verify bundled sidecar metadata, and upload release assets in a later step, including a dedicated Linux ARM64 path. + +#### Lines of code changed since previous release +976 lines changed since `v0.11.153` (488 insertions, 488 deletions). 
+ +#### Release importance +Minor release: updates release packaging only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.155 + +#### Commit +`725b2117` + +#### Released at +`2026-03-15T16:08:25Z` + +#### Title +Windows release diagnostics stop masking failures + +#### One-line summary +Improves release-pipeline diagnostics and normalizes workflow action inputs so broken Windows packaging runs are easier to debug. + +#### Main changes +Fixes the Windows GitHub connectivity diagnostic step and aligns release-action input names so maintainers get clearer failure signals during desktop release packaging. + +#### Lines of code changed since previous release +51 lines changed since `v0.11.154` (27 insertions, 24 deletions). + +#### Release importance +Minor release: improves release reliability only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.156 + +#### Commit +`598fed9d` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-15T10:06:37-07:00`. + +#### Title +Desktop release packaging splits build from upload + +#### One-line summary +Restructures desktop release automation to build artifacts first, bundle Linux ARM separately, and upload assets in a final pass. 
+ +#### Main changes +Splits desktop releases into build, bundle, and upload stages, adds separate Linux ARM packaging, and introduces automated asset upload from workflow artifacts. + +#### Lines of code changed since previous release +602 lines changed since `v0.11.155` (486 insertions, 116 deletions). + +#### Release importance +Minor release: updates release packaging flow only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.157 + +#### Commit +`fca457be` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-15T12:27:44-07:00`. + +#### Title +Den access controls tighten and nested task sessions return + +#### One-line summary +Hardens Den sign-in and worker access while restoring inline subagent transcripts, selected-row session actions, and cleaner feedback links. + +#### Main changes +- Requires verified Den accounts, removes exposed host tokens, and limits worker access more tightly by user role. +- Renders subagent sessions inline under task steps and moves rename/delete actions into the selected session row. +- Opens feedback mail links in place on the web instead of leaving a blank tab behind. + +#### Lines of code changed since previous release +706 lines changed since `v0.11.156` (485 insertions, 221 deletions). + +#### Release importance +Minor release: improves session clarity and fixes a few focused interaction issues without changing the broader OpenWork model. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed subagent sessions so child work stays attached to the task step that spawned it. +- Fixed session list actions so controls live on the selected row instead of feeling misplaced. +- Fixed web feedback email links so they no longer open a stray blank tab. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.158 + +#### Commit +`09837baf` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-15T12:43:37-07:00`. + +#### Title +Orchestrator npm publish runs from package cwd + +#### One-line summary +Fixes the release workflow so `openwork-orchestrator` publishes from `packages/orchestrator`, with no visible app or web workflow change. + +#### Main changes +Corrects the orchestrator publish job to run from the package directory so sidecar build and npm publish steps use the right paths. + +#### Lines of code changed since previous release +33 lines changed since `v0.11.157` (17 insertions, 16 deletions). + +#### Release importance +Minor release: updates release publishing plumbing only, with no intended user-facing product change. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.159 + +#### Commit +`0559b341` + +#### Released at +`2026-03-15T20:36:46Z` + +#### Title +Den cloud billing and worker launch align + +#### One-line summary +Aligns the app-hosted Den flow with landing-page messaging, restores checkout handling for extra workers, and fixes visible billing and marketing regressions. 
+ +#### Main changes +- Reworked the app cloud-worker flow to match the Den landing experience and messaging. +- Restored Polar checkout and return handling for additional cloud workers. +- Fixed the Den marketing rail and removed a dead billing navigation path. + +#### Lines of code changed since previous release +2472 lines changed since `v0.11.158` (1192 insertions, 1280 deletions). + +#### Release importance +Minor release: meaningfully improves the hosted cloud flow and corrects a couple of visible web regressions without redefining OpenWork's overall product shape. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Aligned the app cloud-worker flow with the Den landing experience for a more consistent hosted setup path. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed the Den marketing rail so the hosted web surface renders correctly again. +- Removed an impossible billing navigation branch from the cloud control experience. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.160 + +#### Commit +`a9e56ec0` + +#### Released at +`2026-03-15T23:51:50Z` + +#### Title +Den auth, downloads, and nested sessions polish + +#### One-line summary +Simplifies the Den auth entry flow, sends landing-page downloads to the right installer, and restores parent-child session browsing in the sidebar. + +#### Main changes +- Simplified the Den auth screen so account entry is lighter and easier to scan. +- Download buttons now choose the right installer for the visitor's OS and architecture. +- Sidebar previews now keep subagent sessions nested under their parent tasks. + +#### Lines of code changed since previous release +475 lines changed since `v0.11.159` (303 insertions, 172 deletions). 
+ +#### Release importance +Minor release: delivers a collection of focused UX and reliability fixes across key web and session surfaces without changing the core OpenWork workflow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Simplified the Den auth screen so the hosted sign-in path is less confusing. +- Fixed landing download CTAs so they point users to the right installer for their OS and architecture. +- Fixed nested session rendering so subagent sessions appear under their parent tasks with clearer list structure. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.161 + +#### Commit +`4fb90428` + +#### Released at +Unreleased tag only. No published GitHub release. Tagged at `2026-03-15T16:48:43-07:00`. + +#### Title +Den first-run goes straight to connect + +#### One-line summary +Cuts onboarding friction by removing the intent step, waiting for session hydration cleanly, and ending first-run on a direct connect screen. + +#### Main changes +Removes the extra intent step, drops the transient marketing-heavy auth shell, and adds a dedicated final connect screen so new Den users can launch a worker and open it in OpenWork with fewer detours. + +#### Lines of code changed since previous release +448 lines changed since `v0.11.160` (198 insertions, 250 deletions). + +#### Release importance +Minor release: improves a focused hosted onboarding path without materially changing OpenWork's broader product model. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Improved the Den first-run experience so the hosted setup path feels more focused and intentional. 
+ +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.162 + +#### Commit +`770c9473` + +#### Released at +`2026-03-16T00:51:15Z` + +#### Title +Docker dev prints LAN-ready OpenWork URLs + +#### One-line summary +Makes local Docker testing easier from phones and other devices by printing public URLs and deriving Den auth and CORS defaults from the detected host. + +#### Main changes +- The legacy Docker dev stack now prints localhost, hostname, and LAN IP URLs for the app, server, and share service. +- `den-dev-up.sh` derives auth URLs and trusted origins for cross-device testing. +- Added `OPENWORK_PUBLIC_HOST` and `DEN_PUBLIC_HOST` overrides when auto-detection is wrong. + +#### Lines of code changed since previous release +149 lines changed since `v0.11.161` (130 insertions, 19 deletions). + +#### Release importance +Minor release: improves local stack accessibility for testing and self-hosted development without changing the main OpenWork product flow. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Improved Docker dev-stack defaults so OpenWork is easier to access from other devices on local networks. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.163 + +#### Commit +`69249a20` + +#### Released at +`2026-03-16T02:47:00Z` + +#### Title +Custom skill hub repos and steadier session actions + +#### One-line summary +Lets teams browse and install skills from any GitHub hub repo while making session switching, composer focus, and reload prompts behave more predictably. 
+ +#### Main changes +- Added custom GitHub skill hub repos, including saved repo selection and install-from-that-repo flows. +- Cmd+K session actions now return focus to the composer after opening or creating sessions. +- Restored the inline skill reload banner and cleaned up workspace status alignment. + +#### Lines of code changed since previous release +1169 lines changed since `v0.11.162` (1034 insertions, 135 deletions). + +#### Release importance +Minor release: adds a focused new skills-source capability and cleans up session interaction issues without changing the product's overall shape. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added custom GitHub skill hub repository support so organizations can use their own hosted skill sources inside OpenWork. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Preserved composer focus after Cmd+K session actions. +- Restored the inline skill reload banner in sessions. +- Aligned worker status labels with worker names for clearer scanning. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.164 + +#### Commit +`b88e2b53` + +#### Released at +`2026-03-16T15:14:38Z` + +#### Title +Owner tokens and child sessions stay visible + +#### One-line summary +Clarifies remote approval access with owner tokens, keeps nested child sessions from disappearing in sidebar syncs, and broadens polish across sharing and localization. + +#### Main changes +- Remote and cloud connect flows now expose owner tokens separately from collaborator tokens for permission prompts. +- Sidebar resyncs stop dropping child task sessions when root items refresh. +- Added Japanese localization and sharper HTML-first share previews. + +#### Lines of code changed since previous release +2418 lines changed since `v0.11.163` (1907 insertions, 511 deletions). 
+ +#### Release importance +Minor release: improves visibility, recovery, and localization across key flows without materially changing OpenWork's core architecture. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added full Japanese localization coverage for the app. +- Improved share previews with HTML-first crawler links and more polished Open Graph cards. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Preserved child task sessions during root sidebar syncs. +- Exposed owner tokens in remote permission prompts so recovery flows are easier to finish. +- Allowed removing the default skills hub repository for fully custom skills setups. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.165 + +#### Commit +`d556ed53` + +#### Released at +`2026-03-17T02:56:06Z` + +#### Title +Settings can sign into Cloud and open workers + +#### One-line summary +Adds an in-app OpenWork Cloud settings flow for sign-in, org selection, and worker opening, while smoothing desktop auth handoff and share reliability. + +#### Main changes +- Added a Cloud tab in Settings for sign-in, org selection, worker lists, and opening ready Den workers into OpenWork. +- Routed desktop auth through the web handoff flow, including installed-app scheme support and bearer-session handling. +- Restored shared bundle installs and fully cleared disconnected provider credentials. + +#### Lines of code changed since previous release +3120 lines changed since `v0.11.164` (2391 insertions, 729 deletions). + +#### Release importance +Major release: introduces a substantial new OpenWork Cloud workflow and expands how users authenticate and open cloud workers from the product. 
+ +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added OpenWork Cloud authentication and worker-open controls directly in Settings. +- Added web-based desktop auth handoff for Den so cloud and desktop sign-in flows connect more smoothly. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +4 + +#### Major bug fix details +- Restored shared bundle installs and repeat app opens in OpenWork Share. +- Fully cleared disconnected provider credentials. +- Fixed Den auth handoff to use the installed desktop scheme reliably. +- Improved share preview readability so unfurls are easier to scan. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.166 + +#### Commit +`81882826` + +#### Released at +`2026-03-17T05:45:14Z` + +#### Title +Daytona-backed Den Docker flow ships + +#### One-line summary +Introduces a Daytona-first local Den stack with a worker proxy and snapshot tooling, while tightening org setup and local developer startup paths. + +#### Main changes +- Added a Daytona-backed Den Docker flow with a dedicated worker proxy and snapshot builder for preloaded runtimes. +- Introduced the `den-v2` control plane and shared Den DB packages for the new hosted-worker path. +- Fixed unique org slug generation and the `webdev:local` startup script. + +#### Lines of code changed since previous release +13718 lines changed since `v0.11.165` (12760 insertions, 958 deletions). + +#### Release importance +Major release: lands a major Den runtime and development-stack expansion that materially changes how cloud-worker flows are developed and tested. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added a full Daytona-backed Den Docker development flow with new controller, proxy, schema, and provisioning pieces for cloud-worker workflows. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Enforced stable org and environment syncing with unique org slugs for Den dev setups. +- Fixed the `webdev:local` helper script so local web startup works reliably. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.167 + +#### Commit +`5ac86e5a` + +#### Released at +Unreleased draft release. Tagged at `2026-03-16T22:50:30-07:00`. + +#### Title +Cloud settings stay gated by developer mode + +#### One-line summary +Keeps the Cloud settings tab and default settings route aligned with developer mode so regular users do not land in unfinished controls. + +#### Main changes +Fixes the Settings tab list and default settings route so Cloud controls only appear in developer mode, matching the intended rollout of the new OpenWork Cloud panel. + +#### Lines of code changed since previous release +45 lines changed since `v0.11.166` (23 insertions, 22 deletions). + +#### Release importance +Minor release: fixes a narrow but important settings visibility regression for advanced cloud workflows. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Restored access to Cloud settings controls in Developer Mode so advanced cloud setup remains reachable. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.168 + +#### Commit +`603ddfee` + +#### Released at +`2026-03-17T06:27:40Z` + +#### Title +Release recovery with repaired installers + +#### One-line summary +Republishes the release with repaired assets; the tagged diff itself is metadata-only, while the intended Cloud tab gating fix landed in `v0.11.167`. 
+ +#### Main changes +This tag mainly recovers the release process: `v0.11.167..v0.11.168` only bumps package versions, while the user-visible Cloud settings gating change was already in the prior tag. + +#### Lines of code changed since previous release +26 lines changed since `v0.11.167` (13 insertions, 13 deletions). + +#### Release importance +Minor release: recovers a small settings-flow fix and restores release/install reliability without changing the product's broader behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Prevented hidden Cloud settings state from stranding Den desktop handoff flows. +- Restored frozen-lockfile release installs and the expected desktop asset publication set. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.169 + +#### Commit +`9ea1957b` + +#### Released at +`2026-03-18T00:11:42Z` + +#### Title +Den handoff and session chrome get steadier + +#### One-line summary +Keeps Den browser handoff and worker naming in sync while cleaning up session focus, reload banners, run status, and broken sidebar affordances. + +#### Main changes +Hardened Den sign-in and worker-open handoff by separating browser and API base URLs and returning proxy-safe desktop auth URLs. + +Also released: + +- Restored composer focus, flattened reload banners, and removed the broken artifacts rail. +- Simplified OpenWork Share OG previews and cleaned up Den landing CTAs. + +#### Lines of code changed since previous release +3699 lines changed since `v0.11.168` (2421 insertions, 1278 deletions). + +#### Release importance +Minor release: focuses on connection reliability and session polish across existing workflows rather than reshaping the product. 
+ +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +5 + +#### Major bug fix details +- Persisted Den browser and API base URLs separately to avoid broken desktop handoff state. +- Restored proxy-safe desktop handoff and browser-facing CORS behavior for Den workers. +- Kept open-in-web links auto-connecting reliably into sessions. +- Restored composer focus after command actions and simplified session run-state feedback. +- Removed the broken artifacts rail and flattened the reload-required banner in sessions. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.170 + +#### Commit +`3869313b` + +#### Released at +`2026-03-19T17:27:40Z` + +#### Title +OpenWork Cloud web flows and remote reconnects improve + +#### One-line summary +Reworks the hosted Cloud web flow while making remote worker links, persisted share tokens, provider auth, and desktop close behavior more dependable. + +#### Main changes +- Rebuilt Den web auth, checkout, and dashboard routes so hosted onboarding and billing feel like the app instead of a one-off page. +- Persisted worker share tokens and repeated deeplinks across restarts, with stronger open-in-web auto-connect and connect overlays. +- Added self-serve Cloud settings, OpenAI headless auth, and tray-on-close desktop behavior. + +#### Lines of code changed since previous release +20054 lines changed since `v0.11.169` (7642 insertions, 12412 deletions). + +#### Release importance +Major release: substantially changes the hosted OpenWork Cloud experience and remote-connect workflow across web, desktop, and cloud surfaces. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Tailored the hosted web app UI and Den onboarding flow for OpenWork Cloud deployments. 
+- Made Cloud settings self-serve and exposed OpenAI headless auth so more provider and cloud setup can happen directly in-product. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +5 + +#### Major bug fix details +- Restored Polar billing flow during Den checkout. +- Persisted worker share tokens across restarts. +- Restored repeated shared-skill deeplinks in the desktop app. +- Kept open-in-web auto-connect and the worker overlay working reliably during connect. +- Improved desktop behavior by hiding to tray on close and restoring the window correctly. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.171 + +#### Commit +`10ec28d6` + +#### Released at +Unreleased draft release. Tagged at `2026-03-19T14:01:13-07:00`. + +#### Title +Session trace rows only open when they have details + +#### One-line summary +Stops empty trace rows from expanding, removes stray desktop token-store test code from releases, and moves the repo into the new apps and ee layout. + +#### Main changes +- Only trace rows with real details expand, with tighter mobile wrapping and clearer tool icons. +- Removed stray token-store test code from desktop release code. +- Reorganized the repo into `apps/` and `ee/` paths without changing app behavior. + +#### Lines of code changed since previous release +1577 lines changed since `v0.11.170` (986 insertions, 591 deletions). + +#### Release importance +Minor release: fixes startup and session-trace issues while carrying a mostly structural repo reorganization underneath. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Removed stray desktop token-store test code that could affect startup and release reliability. 
+- Made session trace rows expand only when real details exist, improving readability and reducing visual noise. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.172 + +#### Commit +`d47a194d` + +#### Released at +`2026-03-19T22:28:14Z` + +#### Title +Server package naming and session traces line up + +#### One-line summary +Renames the published server package to `openwork-server` and polishes trace-row icon and chevron alignment so session runs scan more cleanly. + +#### Main changes +- Renamed the published server package to `openwork-server`, updating orchestrator, release, and dev tooling to resolve the same package consistently. +- Tightened trace-row icon and chevron alignment so session summaries read cleanly. + +#### Lines of code changed since previous release +3006 lines changed since `v0.11.171` (2296 insertions, 710 deletions). + +#### Release importance +Minor release: improves packaging consistency and session trace polish without materially changing user workflows. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Resolved inconsistent server package naming across install, publish, and verification paths. +- Fixed session trace row alignment so icons and chevrons stay visually aligned with summaries. + +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Replaced prior published server package references with the standardized `openwork-server` naming. 
+ +## v0.11.173 + +#### Commit +`5f0e11ce` + +#### Released at +`2026-03-20T00:55:12Z` + +#### Title +Daytona workers report activity and local Node tools spawn reliably + +#### One-line summary +Adds worker heartbeats for Daytona-backed Cloud workers while making local MCP and tool launches work in nvm-managed Node environments. + +#### Main changes +- Added Daytona worker activity heartbeats so Cloud worker state stays fresher. +- Added release snapshot automation for Daytona images. +- Exposed `nvm`-managed Node paths to local spawns so MCP tools and local commands find Node more reliably. + +#### Lines of code changed since previous release +805 lines changed since `v0.11.172` (762 insertions, 43 deletions). + +#### Release importance +Minor release: improves worker runtime observability and local spawn compatibility without materially changing how most users operate OpenWork. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added Daytona worker activity heartbeats to improve worker liveness tracking for cloud-worker flows. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Exposed nvm-managed Node tools to local spawns so local tool execution works in more environments. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.174 + +#### Commit +`9f3890f6` + +#### Released at +Unreleased draft release. Tagged at `2026-03-19T18:59:35-07:00`. + +#### Title +Session traces go back to the familiar behavior + +#### One-line summary +Restores the original trace interaction model, brings back summary copy actions, and keeps worker names readable in narrow sidebars. + +#### Main changes +- Reverted the newer expandable trace treatment and restored the original session trace behavior. +- Brought back trace summary copy actions. 
+- Kept worker names visible in narrow sidebars instead of collapsing them away. + +#### Lines of code changed since previous release +508 lines changed since `v0.11.173` (107 insertions, 401 deletions). + +#### Release importance +Minor release: rolls back confusing trace behavior and repairs sidebar readability without changing the product's broader workflow model. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Restored the original session trace interaction model. +- Restored trace summary copy actions. +- Preserved worker names in narrow sidebars. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.175 + +#### Commit +`da0cd71c` + +#### Released at +`2026-03-20T05:53:41Z` + +#### Title +Authorized folders and first-run session guidance move into Settings + +#### One-line summary +Adds Settings-based folder authorization and server-backed empty states, then tightens sidebar and composer labeling so navigation stays readable. + +#### Main changes +Adds a server-backed way to manage authorized folders and to seed first-run session empty states from workspace blueprints. + +Also released: + +- Cleaner sidebar titles, status labels, footer pinning, and hidden generated timestamps. +- Restored composer action labels and removed the dead artifacts rail. + +#### Lines of code changed since previous release +1685 lines changed since `v0.11.174` (1313 insertions, 372 deletions). + +#### Release importance +Minor release: adds focused settings and onboarding improvements while mainly polishing existing app-shell and sidebar behavior. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added authorized-folder management directly in Settings. 
+- Added server-backed session empty states to guide first-run and worker setup more clearly.
+
+#### Major bugs resolved
+True
+
+#### Number of major bugs resolved
+4
+
+#### Major bug fix details
+- Restored composer action labels.
+- Removed the session sidebar artifacts rail.
+- Kept workspace actions visible and made quieter status labels easier to scan in the sidebar.
+- Fixed sidebar footer pinning, title truncation, timestamp readability, and flex overflow issues.
+
+#### Deprecated features
+True
+
+#### Number of deprecated features
+1
+
+#### Deprecated details
+- Removed the session sidebar artifacts rail in favor of a cleaner sidebar flow.
diff --git a/changelog/release-tracker-2026-03-20.md b/changelog/release-tracker-2026-03-20.md
new file mode 100644
index 0000000000..8b28d0300d
--- /dev/null
+++ b/changelog/release-tracker-2026-03-20.md
@@ -0,0 +1,1316 @@
+# Release Changelog Tracker
+
+Internal preparation file for release summaries. This is not yet published to the changelog page or docs.
+
+## v0.11.176
+
+#### Commit
+`47b6f7e3`
+
+#### Released at
+Unreleased draft release. Tagged at `2026-03-20T12:51:31-07:00`.
+
+#### Title
+OpenAI setup points new chats the right way
+
+#### One-line summary
+Makes first-run provider setup clearer by sending new chats into the ChatGPT flow and fixing remote messaging health reporting.
+
+#### Main changes
+- Swapped the starter CTA to Connect ChatGPT and hid it once OpenAI is already connected.
+- Made OpenAI auth worker-aware so remote workers use the device flow with copyable codes and manual browser launch.
+- Fixed worker-scoped router health so remote messaging no longer appears unconfigured in Settings and identities.
+
+#### Lines of code changed since previous release
+1079 lines changed since `v0.11.175` (618 insertions, 461 deletions).
+
+#### Release importance
+Minor release: fixes provider onboarding and remote messaging reliability without materially changing the product's overall shape.
+ +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed provider onboarding so the new-session CTA sends users through the correct OpenAI connection flow, including remote-worker cases. +- Fixed remote messaging router health reporting so configured remote workers no longer look broken in settings and identities flows. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.177 + +#### Commit +`9603be37` + +#### Released at +`2026-03-20T20:54:48Z` + +#### Title +Downloads CTA and npm install fallback land + +#### One-line summary +Gets desktop users to the right download path faster and lets `openwork-orchestrator` recover when npm skips its platform binary. + +#### Main changes +Routes the desktop landing CTA to the Download page so the install path is clearer. + +Also released: + +- `openwork-orchestrator` postinstall now downloads the matching release binary when optional platform packages are missing. +- Daytona snapshot builds now use the source orchestrator binary. + +#### Lines of code changed since previous release +175 lines changed since `v0.11.176` (139 insertions, 36 deletions). + +#### Release importance +Minor release: improves install-path clarity and local install resilience with a focused release-engineering patch. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed the landing CTA so new users reach the downloads page directly instead of taking a less useful route. +- Fixed orchestrator npm installs so they can fall back to published binaries when the local install path fails. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.178 + +#### Commit +`1cc5360f` + +#### Released at +`2026-03-22T03:08:43Z` + +#### Title +Workspace sharing and model controls get a major refresh + +#### One-line summary +Redesigns workspace sharing and sidebar structure, makes reasoning controls model-aware, and adds a hosted feedback flow. + +#### Main changes +- Redesigned workspace sharing and the right sidebar, including cleaner remote credentials and nested child sessions. +- Made model pickers model-aware with provider icons and per-model reasoning or behavior controls. +- Moved app feedback to a hosted form, added an Exa toggle, and stopped forcing starter workspaces on desktop boot. + +#### Lines of code changed since previous release +8432 lines changed since `v0.11.177` (5335 insertions, 3097 deletions). + +#### Release importance +Major release: substantially reshapes navigation, sharing, and model-control flows across the app. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Redesigned workspace sharing and introduced a unified right sidebar with nested child sessions. +- Added model-aware behavior controls so provider-specific options are clearer in the composer and settings. +- Moved app feedback into a hosted feedback form that is reachable directly from app surfaces. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Restored the in-composer Run action and stabilized the composer footer after recent UI regressions. +- Fixed session and settings follow-up regressions that made remote connect, picker behavior, and transcript affordances feel inconsistent. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.179 + +#### Commit +`5f043456` + +#### Released at +`2026-03-22T05:34:34Z` + +#### Title +Den checkout and workspace setup get leaner + +#### One-line summary +Simplifies Den billing and dashboard surfaces, streamlines workspace creation flows, and removes the desktop tray path. + +#### Main changes +- Simplified the create-workspace and connect-remote modals so setup fields read more clearly. +- Refreshed Den checkout and dashboard screens into a flatter, cleaner shell. +- Removed desktop tray support and now requires contact details on hosted feedback submissions. + +#### Lines of code changed since previous release +1025 lines changed since `v0.11.178` (539 insertions, 486 deletions). + +#### Release importance +Minor release: focuses on checkout, workspace setup, and a few visible desktop/share fixes without changing the overall product model. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Removed tray support so desktop close behavior no longer depends on a redundant background tray icon. +- Removed duplicate thinking labels in sessions so streamed reasoning state is easier to read. + +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Removed desktop tray support from the app. + +## v0.11.180 + +#### Commit +`093ee573` + +#### Released at +Unreleased draft release. Tagged at `2026-03-22T09:29:16-07:00`. + +#### Title +Den landing and provisioning visuals get pared back + +#### One-line summary +Mostly a docs-and-artifact cleanup release, with a small Den landing and provisioning UI simplification. + +#### Main changes +Mostly removes internal PR docs and screenshots, while trimming the Den landing hero and simplifying the worker provisioning animation. 
No clear core app, server, or developer workflow changes land in this tag. + +#### Lines of code changed since previous release +3020 lines changed since `v0.11.179` (23 insertions, 2997 deletions). + +#### Release importance +Minor release: pares back visual complexity in Den onboarding surfaces without materially changing product behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.181 + +#### Commit +`abcfdfc7` + +#### Released at +`2026-03-22T17:02:23Z` + +#### Title +Version metadata is republished in sync + +#### One-line summary +Republishes synchronized package versions only, with no distinct user-facing or developer-facing workflow change. + +#### Main changes +Packaging-only release that syncs version metadata across app, desktop, server, router, and orchestrator packages. No material workflow, UI, API, or docs behavior changes are visible from the code. + +#### Lines of code changed since previous release +58 lines changed since `v0.11.180` (40 insertions, 18 deletions). + +#### Release importance +Minor release: primarily refreshes release artifacts and synchronized version metadata. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.182 + +#### Commit +`7a0e31d0` + +#### Released at +`2026-03-23T01:48:48Z` + +#### Title +Local workspaces move under OpenWork server control + +#### One-line summary +Shifts local workspace ownership into OpenWork server so creation, reconnects, config writes, and starter bootstrap stay aligned across the app. + +#### Main changes +- Local workspace create, rename, delete, config writes, and reload events now go through OpenWork server first. +- First-run starter bootstrap and reconnect logic are more reliable across onboarding and sidebar flows. +- Simplified the remote connect modal, moved tool-trace chevrons right, and added Windows ARM64 dev startup support. + +#### Lines of code changed since previous release +1792 lines changed since `v0.11.181` (1510 insertions, 282 deletions). + +#### Release importance +Major release: lands a substantial server-ownership and runtime-architecture change that materially affects core local workspace behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed local workspace reconnect and onboarding inconsistencies by moving workspace ownership into OpenWork server. +- Fixed remote connect friction by simplifying the modal users see when attaching to a remote workspace. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.183 + +#### Commit +`160198ab` + +#### Released at +`2026-03-23T05:01:53Z` + +#### Title +Exa moves into OpenCode settings + +#### One-line summary +Surfaces the Exa search toggle in a clearer OpenCode settings section and rolls back an unready macOS path-normalization change. + +#### Main changes +- Added an OpenCode settings panel that exposes the Exa web-search toggle in a clearer place. 
+- Reverted the macOS path case-folding change to avoid destabilizing session and workspace matching. +- Also removed leftover docs-plan and screenshot artifacts. + +#### Lines of code changed since previous release +614 lines changed since `v0.11.182` (53 insertions, 561 deletions). + +#### Release importance +Minor release: adds a focused advanced-settings capability while avoiding a risky macOS path change. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Added Exa as a configurable option in Advanced settings. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Reverted an unready macOS path normalization change so users do not pick up unstable workspace-path behavior. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.184 + +#### Commit +`09204a02` + +#### Released at +`2026-03-23T15:04:42Z` + +#### Title +CLI quickstart becomes the primary docs path + +#### One-line summary +Rebuilds the docs around a single CLI-first quickstart and removes older split onboarding paths that were harder to follow. + +#### Main changes +Collapses the docs nav to a single quickstart, rewrites onboarding around the remote CLI plus desktop connect flow, and removes older introduction, technical, non-technical, and tutorial pages so first-run guidance is much narrower and easier to scan. + +#### Lines of code changed since previous release +898 lines changed since `v0.11.183` (121 insertions, 777 deletions). + +#### Release importance +Minor release: narrows the documentation surface around Quickstart without changing shipped product behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. 
+ +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.185 + +#### Commit +`5584dfd6` + +#### Released at +`2026-03-24T05:34:00Z` + +#### Title +Safer local sharing and messaging setup + +#### One-line summary +Defaults local workers to loopback-only, makes public messaging exposure more deliberate, and adds guided setup for Chrome control. + +#### Main changes +Local workers now stay localhost-only unless users explicitly opt into remote exposure, messaging is disabled until enabled on purpose, public Telegram bot creation shows a risk warning, and Chrome DevTools MCP gets a guided setup flow. + +#### Lines of code changed since previous release +5434 lines changed since `v0.11.184` (4780 insertions, 654 deletions). + +#### Release importance +Major release: materially changes sharing and messaging defaults while adding meaningful setup and localization improvements. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a guided Control Chrome setup flow inside the app. +- Added Brazilian Portuguese (`pt-BR`) localization. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Local workers now stay localhost-only by default unless users intentionally expose them for sharing. +- Hardened Den and public publishing/auth surfaces so shared flows are less likely to leak into unsafe configurations. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.186 + +#### Commit +`30737e99` + +#### Released at +`2026-03-24T06:16:26Z` + +#### Title +Local reconnects stay scoped to the right workspace + +#### One-line summary +Fixes restart and reconnect flows so local sessions and starter workspaces stay attached to the intended directory. 
+ +#### Main changes +Keeps local session history scoped to the active workspace during reconnects and normalizes persisted starter paths on desktop bootstrap, so restarts stop reopening or creating sessions against the wrong local directory. + +#### Lines of code changed since previous release +397 lines changed since `v0.11.185` (343 insertions, 54 deletions). + +#### Release importance +Minor release: fixes local reconnect and bootstrap scoping issues without introducing broader workflow changes. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed local reconnect behavior so workspace history stays scoped to the active workspace instead of a stale directory. +- Fixed starter-path handling so older persisted local paths reconnect correctly during desktop bootstrap. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.187 + +#### Commit +`5d1c6a28` + +#### Released at +`2026-03-24T15:09:03Z` + +#### Title +Windows workspace scoping handles verbatim paths + +#### One-line summary +Normalizes Windows path transport, verbatim prefixes, and UNC comparisons so local session scope stays consistent across directory formats. + +#### Main changes +Normalizes Windows directory strings end to end, strips verbatim path prefixes, and fixes UNC comparison logic so session lists, deletes, and workspace switching all target the same local workspace scope. + +#### Lines of code changed since previous release +210 lines changed since `v0.11.186` (173 insertions, 37 deletions). + +#### Release importance +Minor release: fixes a focused but important Windows path-scoping problem without changing the broader product experience. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed Windows directory transport mismatches that caused session and sidebar scope checks to disagree. +- Fixed verbatim path-prefix handling so equivalent Windows paths no longer compare as different workspaces. +- Fixed UNC path comparisons so Windows reconnect and worker-switch flows stay scoped correctly. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.188 + +#### Commit +`c9e00db6` + +#### Released at +`2026-03-24T16:29:47Z` + +#### Title +Landing feedback returns to the previous flow + +#### One-line summary +Backs out the Loops feedback template so the landing feedback endpoint goes back to the simpler email-based path. + +#### Main changes +Reverts the Loops-based landing feedback template and config, restoring the earlier app-feedback route behavior without introducing any broader app, worker, or docs workflow changes. + +#### Lines of code changed since previous release +328 lines changed since `v0.11.187` (30 insertions, 298 deletions). + +#### Release importance +Minor release: reverts a focused feedback-flow change to restore the previously working behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Reverted the Loops feedback template rollout so the landing feedback route goes back to the prior, more reliable submission path. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.189 + +#### Commit +`a7fa0312` + +#### Released at +`2026-03-24T17:16:24Z` + +#### Title +Package metadata rolls forward only + +#### One-line summary +Updates the release line with a lockfile and version sync only, without any visible product or workflow changes. + +#### Main changes +Packaging-only release: version metadata and the pnpm lockfile are synchronized, with no meaningful user-facing or developer-facing workflow changes in the shipped code. + +#### Lines of code changed since previous release +26 lines changed since `v0.11.188` (13 insertions, 13 deletions). + +#### Release importance +Minor release: advances the release line without introducing meaningful user-facing behavior changes. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +False + +#### Number of major bugs resolved +0 + +#### Major bug fix details +None. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.190 + +#### Commit +`6c22f800` + +#### Released at +`2026-03-24T23:32:21Z` + +#### Title +Connection guides expand while sharing and auth settle + +#### One-line summary +Mostly docs-focused release that reorganizes onboarding around concrete connection guides, while fixing desktop publish routing and headless OpenAI auth timing. + +#### Main changes +Mostly docs-only: the site shifts to task guides for remote setup, ChatGPT, custom providers, MCPs, sharing, and skill import, while the app fixes public share routing and waits to poll headless OpenAI auth until users actually open the browser. + +#### Lines of code changed since previous release +3837 lines changed since `v0.11.189` (2654 insertions, 1183 deletions). + +#### Release importance +Minor release: improves sharing reliability, provider onboarding stability, and shell polish without materially changing the product's overall shape. 
+ +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed share publishing so packaged desktop builds can publish from the correct desktop origin. +- Fixed share public routing so hardened public routes keep resolving instead of breaking after config changes. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.191 + +#### Commit +`6c9700ce` + +#### Released at +`2026-03-25T01:04:43Z` + +#### Title +Shared detached workers survive restarts + +#### One-line summary +Preserves share credentials for detached workers, hides disconnected config-backed providers, and adds clearer Slack and skill-import docs. + +#### Main changes +Detached Docker-backed workers now keep the right share credentials after restart so share links and reconnect flows keep working, disconnected config-backed providers disappear cleanly from Settings, and docs add clearer Slack and skill-import walkthroughs. + +#### Lines of code changed since previous release +495 lines changed since `v0.11.190` (413 insertions, 82 deletions). + +#### Release importance +Minor release: focuses on reliability fixes for shared workers and provider settings without adding broad new workflows. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed detached worker sharing so saved credentials survive app restarts instead of forcing users to reconnect. +- Fixed disconnected provider handling so config-backed providers stay disabled after users disconnect them. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.192 + +#### Commit +`5f30ad2a` + +#### Released at +`2026-03-25T22:30:34Z` + +#### Title +Workspace switching stops hijacking runtime state + +#### One-line summary +Separates workspace selection from runtime activation, keeps local worker ports stable, and makes shared templates carry starter content correctly. + +#### Main changes +- Split selected workspace from runtime-connected workspace so browsing no longer flips the active worker. +- Kept preferred local server ports sticky and avoided collisions across workspaces. +- Let templates carry extra `.opencode` files and starter sessions, and materialized seeded sessions correctly. + +#### Lines of code changed since previous release +4896 lines changed since `v0.11.191` (3899 insertions, 997 deletions). + +#### Release importance +Major release: materially changes how workspace switching and template-based workspace setup work across the app and server. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added richer workspace template sharing so imports can include extra `.opencode` files. +- Added starter sessions to workspace templates so new workspaces can open with seeded conversations. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed workspace switching semantics so selecting a workspace no longer needlessly reconnects runtimes. +- Fixed blueprint-seeded session materialization so starter sessions load with their intended content. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.193 + +#### Commit +`da74ba9a` + +#### Released at +`2026-03-26T05:23:19Z` + +#### Title +Team template sharing reaches OpenWork Cloud + +#### One-line summary +Adds Save-to-team template flows in the app, while Den gains organizations, member roles, invitations, and a manual Cloud sign-in fallback. 
+ +#### Main changes +- Added Save-to-team flows for workspace templates, with org selection and Cloud sign-in prompts. +- Introduced Den organizations, member roles, invitations, custom roles, and org-scoped template APIs and screens. +- Added a manual Cloud sign-in fallback when automatic team-sharing auth stalls. + +#### Lines of code changed since previous release +7841 lines changed since `v0.11.192` (6406 insertions, 1435 deletions). + +#### Release importance +Major release: adds substantial new Cloud collaboration and organization-management workflows that materially change how teams use OpenWork. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added Cloud team template sharing flows in the OpenWork app. +- Added Den organization management, permissions, and org-scoped template sharing surfaces. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Added a manual Cloud sign-in fallback and clearer sign-in CTA so team-sharing flows are less likely to block on auth issues. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.194 + +#### Commit +`41d93e2e` + +#### Released at +`2026-03-26T20:46:09Z` + +#### Title +Auto compaction and live automations become real workflows + +#### One-line summary +Wires auto compaction to actual workspace config, keeps scheduled jobs live in-app, and adds a faster Den local-dev path. + +#### Main changes +- Wired Auto context compaction to workspace config so the setting actually changes `compaction.auto`. +- Kept scheduled jobs live by polling while the Automations view is open. +- Added a faster Den local-dev path and expanded the Cloud dashboard with shared setups and background-agent links. + +#### Lines of code changed since previous release +5198 lines changed since `v0.11.193` (3852 insertions, 1346 deletions). 
+ +#### Release importance +Minor release: improves several active workflows and developer surfaces, but it does not substantially reshape the product's core user model. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Enabled real automatic context compaction behavior through the app's OpenCode integration. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed the auto compaction toggle so it actually wires through to OpenCode behavior. +- Fixed the custom app MCP add flow so users can stay in settings instead of getting bounced out of setup. +- Fixed automations polling so scheduled jobs keep refreshing while the page is open. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.195 + +#### Commit +`9d5b14b4` + +#### Released at +`2026-03-27T22:02:59Z` + +#### Title +Local workspace creation and Den worker setup get steadier + +#### One-line summary +Routes new local workspaces through the local host, persists model defaults properly, and smooths Den worker-connect and billing flows. + +#### Main changes +- Created local workspaces through the local host so setup and binding finish correctly. +- Preserved workspace default model changes and added a quick compact-session action in chat. +- Simplified Den organization, worker-connect, and billing flows with less polling jank. + +#### Lines of code changed since previous release +5137 lines changed since `v0.11.194` (3875 insertions, 1262 deletions). + +#### Release importance +Minor release: improves existing Den and desktop workflows with focused reliability and UX fixes rather than introducing a new product surface. + +#### Major improvements +True + +#### Number of major improvements +1 + +#### Major improvement details +- Restored full worker connect actions in Den with inline connection controls for ready workers. 
+ +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed default model changes so workspace refreshes no longer wipe out newly chosen defaults. +- Fixed local workspace creation so the app creates them through the local host path reliably. +- Fixed remote workspace binding so connect flows finish attaching the workspace correctly. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.196 + +#### Commit +`663e357b` + +#### Released at +`2026-03-30T21:27:27Z` + +#### Title +OpenWork resumes where you left off + +#### One-line summary +Boots back into the last session, routes straight into session view, and moves automations onto a live scheduler-backed page. + +#### Main changes +- Reopened the last session on workspace boot and routed straight into the session view. +- Moved automations onto a dedicated page backed by live local or remote scheduler jobs. +- Fixed bootstrap and workspace switching so Welcome setup and loading states stop interrupting startup. + +#### Lines of code changed since previous release +34577 lines changed since `v0.11.195` (15875 insertions, 18702 deletions). + +#### Release importance +Major release: substantially changes the app's navigation model and retires the old dashboard concept in favor of a session-first experience. + +#### Major improvements +True + +#### Number of major improvements +2 + +#### Major improvement details +- Added a dedicated Automations page centered on live scheduler jobs. +- Restored last-session boot so workspaces reopen directly into the active conversation flow. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +2 + +#### Major bug fix details +- Fixed welcome workspace bootstrap so first-run workspace setup behaves more predictably. +- Fixed shell and session loading churn so startup and workspace switching feel less like full reloads. 
+ +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Removed the old dashboard-first app concept in favor of session-first navigation and settings-owned tool surfaces. + +## v0.11.197 + +#### Commit +`020d7636` + +#### Released at +`2026-03-31T05:21:16Z` + +#### Title +Sharing gets safer and startup gets less noisy + +#### One-line summary +Hardens workspace sharing, keeps orchestrator secrets out of CLI args and logs, and removes noisy sidebar and Welcome-workspace boot behavior. + +#### Main changes +- Blocked sensitive workspace exports and showed warnings before sharing risky config or secrets. +- Trusted bundle imports only from the configured publisher unless users explicitly choose a warning-backed manual path. +- Moved OpenWork tokens off CLI args and logs, stopped auto-creating Welcome, and fixed collapsed sidebar session lists. + +#### Lines of code changed since previous release +6399 lines changed since `v0.11.196` (5657 insertions, 742 deletions). + +#### Release importance +Major release: ships important security hardening around secret handling and workspace sharing while also correcting core workspace-list behavior. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +4 + +#### Major bug fix details +- Fixed sensitive workspace exports so secrets can be detected and blocked before sharing. +- Fixed bundle fetch routing so publish and fetch traffic stays pinned to the configured OpenWork publisher. +- Fixed orchestrator secret handling so credentials no longer ride in argv and logs. +- Fixed workspace boot/sidebar behavior by stopping unwanted Welcome workspace creation and restoring missing root sessions. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. 
+ +## v0.11.198 + +#### Commit +`761796fd` + +#### Released at +`2026-03-31T06:00:47Z` + +#### Title +Local workspace switches restart the right engine + +#### One-line summary +Fixes a local-only switch bug so changing between local workspaces restarts the engine instead of reusing the old connection. + +#### Main changes +Captures the previous local workspace path before selection changes so switching between local workspaces restarts the engine instead of reusing the old connection. + +#### Lines of code changed since previous release +100 lines changed since `v0.11.197` (59 insertions, 41 deletions). + +#### Release importance +Minor release: fixes a focused local-workspace activation bug without changing the surrounding product flow. + +#### Major improvements +False + +#### Number of major improvements +0 + +#### Major improvement details +None. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +1 + +#### Major bug fix details +- Fixed a local workspace switching race that could skip the required engine restart when moving between local workspaces. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. + +## v0.11.199 + +#### Commit +`4a3e43e5` + +#### Released at +`2026-04-02T02:18:50Z` + +#### Title +Pricing, skill hubs, and session recovery sharpen up + +#### One-line summary +Adds pricing and paid Windows messaging, expands Den with skill hubs and `den-api`, and improves everyday session recovery and debugging. + +#### Main changes +- Added a pricing page and paid Windows messaging, and sent Cloud navigation directly into the app. +- Expanded Den with skill hubs, a new `den-api`, and a smoother org-invite signup flow. +- Added developer log export, per-conversation draft persistence, and recovery after immediate send failures. + +#### Lines of code changed since previous release +19623 lines changed since `v0.11.198` (12501 insertions, 7122 deletions). 
+ +#### Release importance +Major release: introduces major new commercial and Den team workflows while materially improving debugging and session resilience. + +#### Major improvements +True + +#### Number of major improvements +3 + +#### Major improvement details +- Added landing pricing and paid Windows conversion flows. +- Added Den skill hubs and migrated Den onto the new Hono-based `den-api`. +- Added exportable developer logs in the app's debug surface. + +#### Major bugs resolved +True + +#### Number of major bugs resolved +3 + +#### Major bug fix details +- Fixed session send failures so conversations can recover after an immediate error. +- Fixed draft persistence so conversation drafts stay scoped to the correct conversation. +- Fixed startup and sharing edge cases such as delayed host-info checks and unreliable shared access token reveal. + +#### Deprecated features +True + +#### Number of deprecated features +1 + +#### Deprecated details +- Removed the legacy `opkg` CLI integration as part of the release cleanup. + +## v0.11.200 + +#### Commit +`5cc7bbdd` + +#### Released at +`2026-04-03T15:22:13Z` + +#### Title +Cloud skills and team limits move into the core flow + +#### One-line summary +Brings Cloud team skills into the app, adds Den teams and deeper skill-hub management, and enforces org limits during creation. + +#### Main changes +- Added an OpenWork Cloud skills catalog to the Skills page, with install and share-to-team flows. +- Added Den teams plus full skill hub and skill editing and visibility management. +- Moved billing into org creation and enforced organization member limits before setup finishes. + +#### Lines of code changed since previous release +9000 lines changed since `v0.11.199` (7881 insertions, 1119 deletions). + +#### Release importance +Major release: adds substantial new Cloud and Den organization capabilities that materially expand how teams discover, share, and manage skills. 
+ 
+#### Major improvements 
+True 
+ 
+#### Number of major improvements 
+3 
+ 
+#### Major improvement details 
+- Added the OpenWork Cloud team skills catalog on the app Skills page. 
+- Added Den teams and full skill hub management across the org dashboard. 
+- Added billing-aware org creation with org limit enforcement. 
+ 
+#### Major bugs resolved 
+False 
+ 
+#### Number of major bugs resolved 
+0 
+ 
+#### Major bug fix details 
+None. 
+ 
+#### Deprecated features 
+False 
+ 
+#### Number of deprecated features 
+0 
+ 
+#### Deprecated details 
+None. 
diff --git a/changelog/release-tracker-2026-04-04.md b/changelog/release-tracker-2026-04-04.md 
new file mode 100644 
index 0000000000..72cb7cb0c8 
--- /dev/null 
+++ b/changelog/release-tracker-2026-04-04.md 
@@ -0,0 +1,115 @@ 
+# Release Changelog Tracker 
+ 
+Internal preparation file for release summaries. This is not yet published to the changelog page or docs. 
+ 
+## v0.11.201 
+ 
+#### Commit 
+`15725dfb` 
+ 
+#### Released at 
+`2026-04-04T01:59:47Z` 
+ 
+#### Title 
+Workspace lists collapse cleanly and Den organization setup recovers more reliably 
+ 
+#### One-line summary 
+Hides collapsed workspace task rows, steadies session loading, and fixes Den skill saving plus organization draft and invite recovery. 
+ 
+#### Main changes 
+- Collapsed workspaces now hide task rows, empty states, and loading shells until reopened. 
+- Session loading stops refetch churn and early stream flicker. 
+- Den now saves skill metadata from frontmatter and restores pending org drafts and invite counts. 
+ 
+#### Lines of code changed since previous release 
+3956 lines changed since `v0.11.200` (2440 insertions, 1516 deletions). 
+ 
+#### Release importance 
+Minor release: focuses on interface polish and workflow fixes across the app and Den without adding a substantially new product capability. 
+ 
+#### Major improvements 
+False 
+ 
+#### Number of major improvements 
+0 
+ 
+#### Major improvement details 
+None. 
+ 
+#### Major bugs resolved 
+True 
+ 
+#### Number of major bugs resolved 
+3 
+ 
+#### Major bug fix details 
+- Fixed collapsed workspace lists so hidden workspaces no longer leak session previews or loading states. 
+- Fixed session loading and streaming churn that could cause repeated fetches or visible flicker. 
+- Fixed Den skill saving and org management by parsing skill frontmatter correctly and restoring pending invite and draft state. 
+ 
+#### Deprecated features 
+False 
+ 
+#### Number of deprecated features 
+0 
+ 
+#### Deprecated details 
+None. 
+ 
+## v0.11.202 
+ 
+#### Commit 
+`ff981742` 
+ 
+#### Released at 
+`2026-04-04T20:45:30Z` 
+ 
+#### Title 
+Translations fill out across the app and shared skill imports work again 
+ 
+#### One-line summary 
+Adds Thai, restores missing page translations, fixes migrated shared-skill links, and tightens docs and local desktop dev setup. 
+ 
+#### Main changes 
+Completed localization coverage across the app and added Thai as a selectable language. 
+ 
+Also released: 
+ 
+- Restored shared-skill import links and canonical bundle fetch URLs. 
+- Added clearer automations and skill-import docs. 
+- Stopped desktop dev from reusing another checkout's Vite server. 
+ 
+#### Lines of code changed since previous release 
+8103 lines changed since `v0.11.201` (7198 insertions, 905 deletions). 
+ 
+#### Release importance 
+Minor release: expands localization coverage and fixes important import and desktop-dev paths without changing the core product model. 
+ 
+#### Major improvements 
+True 
+ 
+#### Number of major improvements 
+1 
+ 
+#### Major improvement details 
+- Added Thai and completed translation coverage across the app's shipped locales. 
+ 
+#### Major bugs resolved 
+True 
+ 
+#### Number of major bugs resolved 
+3 
+ 
+#### Major bug fix details 
+- Restored missing page translations and corrected locale labels so translated screens stop falling back unexpectedly. 
+- Restored migrated shared-skill links and canonical bundle fetch URLs so docs imports work again. +- Stopped desktop dev from reusing another checkout's Vite server. + +#### Deprecated features +False + +#### Number of deprecated features +0 + +#### Deprecated details +None. diff --git a/constants.json b/constants.json new file mode 100644 index 0000000000..aca25f3adf --- /dev/null +++ b/constants.json @@ -0,0 +1,3 @@ +{ + "opencodeVersion": "v1.4.9" +} diff --git a/design-prd.md b/design-prd.md deleted file mode 100644 index c614fc90a2..0000000000 --- a/design-prd.md +++ /dev/null @@ -1,486 +0,0 @@ -# OpenWork Product Requirements Document (PRD) - -## Summary - -OpenWork is an open-source **native GUI** (Tauri) that makes OpenCode feel like a polished consumer app for non-technical people. - -- OpenCode is the **engine**. -- OpenWork is the **experience**: onboarding, safety, permissions, progress, artifacts, and a premium-feeling UI. - -OpenWork competes directly with Anthropic’s Cowork conceptually, but stays open, local-first, and standards-based. - -## Goals - -- Deliver a **premium, extremely slick** user experience (desktop + mobile). -- Make OpenCode usable without a terminal. -- Launch/attach to an OpenCode instance automatically when OpenWork starts. -- Expose OpenCode primitives (sessions, messages, tools, permissions, files) in a non-technical UI. -- Provide long-running tasks with resumability. -- Provide explicit, understandable permissions and auditing. -- Work with **only the folders the user authorizes**. - -## Non-Goals - -- Replacing OpenCode’s CLI/TUI. -- Shipping a hosted SaaS in v1. -- Creating bespoke “magic” capabilities that don’t map to OpenCode APIs. - -## Target Users - -1. **Non-technical knowledge worker**: “Do this for me” workflows with guardrails. -2. **Mobile-first user**: start/monitor tasks from phone. -3. **Power user**: wants UI parity + speed + inspection. -4. **Admin/host**: manages a shared machine + profiles. 
- -## Success Metrics - -- < 5 minutes to first successful task on fresh install. -- > 80% task success without terminal fallback. -- Permission prompts understood/accepted (low confusion + low deny-by-accident). -- UI performance: 60fps; <100ms interaction latency; no jank. - -## Principles - -- **Parity**: UI actions map to OpenCode server APIs. -- **Transparency**: plans, steps, tool calls, permissions are visible. -- **Least privilege**: only user-authorized folders + explicit approvals. -- **Prompt is the workflow**: product logic lives in prompts, rules, and skills. -- **Graceful degradation**: if access is missing, guide the user. - ---- - -## Core Architecture - -OpenWork is a Tauri application with two runtime modes: - -### Mode A — Host (Desktop) - -- OpenWork runs on a desktop/laptop and **starts** OpenCode locally. -- The OpenCode server runs on loopback (default `127.0.0.1:4096`). -- OpenWork UI connects via the official SDK and listens to events. - -### Mode B — Client (Mobile) - -- OpenWork runs on iOS/Android as a **remote controller**. -- It connects to an already-running OpenCode server hosted by a trusted device. -- Pairing uses a QR code / one-time token and a secure transport (LAN or tunneled). - -This split makes mobile “first-class” without requiring the full engine to run on-device. - ---- - -## OpenCode Integration (Exact SDK + APIs) - -OpenWork uses the official JavaScript/TypeScript SDK: - -- Package: `@opencode-ai/sdk/v2` (UI should import `@opencode-ai/sdk/v2/client` to avoid Node-only server code) -- Purpose: type-safe client generated from OpenAPI spec - -### Engine Lifecycle - -#### Start server + client (Host mode) - -Use `createOpencode()` to launch the OpenCode server and create a client. 
- -```ts -import { createOpencode } from "@opencode-ai/sdk/v2"; - -const opencode = await createOpencode({ - hostname: "127.0.0.1", - port: 4096, - timeout: 5000, - config: { - model: "anthropic/claude-3-5-sonnet-20241022", - }, -}); - -const { client } = opencode; -// opencode.server.url is available -``` - -#### Connect to an existing server (Client mode) - -```ts -import { createOpencodeClient } from "@opencode-ai/sdk/v2/client"; - -const client = createOpencodeClient({ - baseUrl: "http://localhost:4096", - directory: "/path/to/project", -}); -``` - -### Health + Version - -- `client.global.health()` - - Used for startup checks, compatibility warnings, and diagnostics. - -### Event Streaming (Real-time UI) - -OpenWork must be real-time. It subscribes to SSE events: - -- `client.event.subscribe()` - -The UI uses these events to drive: - -- streaming assistant responses -- step-level tool execution timeline -- permission prompts -- session lifecycle changes - -### Sessions (Primary Primitive) - -OpenWork maps a “Task Run” to an OpenCode **Session**. - -Core methods: - -- `client.session.create()` -- `client.session.list()` -- `client.session.get()` -- `client.session.messages()` -- `client.session.prompt()` -- `client.session.abort()` -- `client.session.summarize()` - -### Files + Search - -OpenWork’s file browser and “what changed” UI are powered by: - -- `client.find.text()` -- `client.find.files()` -- `client.find.symbols()` -- `client.file.read()` -- `client.file.status()` - -### Permissions - -OpenWork must surface permission requests clearly and respond explicitly. - -- Permission response API: - - `client.permission.reply({ requestID, reply })` (where `reply` is `once` | `always` | `reject`) - -OpenWork UI should: - -1. Show what is being requested (scope + reason). -2. Provide choices (allow once / allow for session / deny). -3. Post the response to the server. -4. Record the decision in the run’s audit log. 
- -### Config + Providers - -OpenWork’s settings pages use: - -- `client.config.get()` -- `client.config.providers()` -- `client.auth.set()` (optional flow to store keys) - - -## When it comes to design - -use the design from ./design.ts that is your core reference for building the entire ui - -### Projects + Path - -- `client.project.list()` / `client.project.current()` -- `client.path.get()` - -OpenWork conceptually treats “workspace” as the current project/path. - -### Optional TUI Control (Advanced) - -The SDK exposes `client.tui.*` methods. OpenWork can optionally provide a “Developer Mode” screen to: - -- append/submit prompt -- open help/sessions/themes/models -- show toast - -This is optional and not required for non-technical MVP. - ---- - -## Folder Authorization Model - -OpenWork enforces folder access through **two layers**: - -1. **OpenWork UI authorization** - - user explicitly selects allowed folders via native picker - - OpenWork remembers allowed roots per profile - -2. **OpenCode server permissions** - - OpenCode requests permissions as needed - - OpenWork intercepts requests via events and displays them - -Rules: - -- Default deny for anything outside allowed roots. -- “Allow once” never expands persistent scope. -- “Allow for session” applies only to the session ID. -- “Always allow” (if offered) must be explicit and reversible. - ---- - -## Product Primitives (What OpenWork Exposes) - -OpenWork must feel like “OpenCode, but for everyone.” - -### 1) Tasks - -- A Task = a user-described outcome. -- A Run = an OpenCode session + event stream. - -### 2) Plans / Todo Lists - -OpenWork provides a first-class plan UI: - -- Plan is generated before execution (editable). -- Plan is updated during execution (step status + timestamps). -- Plan is stored as a structured artifact attached to the session (JSON) so it’s reconstructable. 
- -Implementation detail: - -- The plan is represented in OpenCode as structured `parts` (or a dedicated “plan message”) and mirrored in OpenWork. - -### 3) Steps - -- Each tool call becomes a step row with: - - tool name - - arguments summary - - permission state - - start/end time - - output preview - -### 4) Artifacts - -Artifacts are user-visible outputs: - -- files created/modified -- generated documents/spreadsheets/presentations -- exported logs and summaries - -OpenWork lists artifacts per run and supports open/share/download. - -### 5) Audit Log - -Every run provides an exportable audit log: - -- prompts -- plan -- tool calls -- permission decisions -- outputs - ---- - -## UI/UX Requirements (Slick as a Core Goal) - -### Design Targets - -- premium, calm, high-contrast -- subtle motion, springy transitions -- zero “developer vibes” in default mode - -### Performance Targets - -- 60fps animations -- <100ms input-to-feedback -- no blocking spinners (always show progress state) - -### Mobile-first Interaction - -- bottom navigation -- swipe gestures (dismiss, approve, cancel) -- haptics for major events -- adaptive layouts (phone/tablet) - -### Accessibility - -- WCAG 2.1 AA -- reduced motion mode -- screen-reader labels for steps + permissions - ---- - -## Functional Requirements - -### Onboarding - -- Host vs Client selection -- workspace selection (Host) -- connect to host (Client) -- provider/model setup -- first-run “hello world” task - -### Task Execution - -- create task -- plan preview and edit -- run with streaming updates -- pause/resume/cancel -- show artifacts and summaries - -### Permissions - -- clear prompts with “why” -- allow once/session -- audit of decisions - -### Templates - -- save a task as template -- variables + quick run - -### Scheduling (Future) - -- schedule template runs -- notify on completion - ---- - -## User Flow Map (Exhaustive) - -### 0. Install & Launch - -1. User installs OpenWork. -2. App launches. -3. 
App shows “Choose mode: Host / Client”. -4. Host: start local OpenCode via SDK. -5. Client: connect flow to an existing host. - -### 1. First-Run Onboarding (Host) - -1. Welcome + safety overview. -2. Workspace folder selection. -3. Allowed folders selection (can be multiple). -4. Provider/model configuration. -5. `global.health()` check. -6. Run a test session using `session.create()` + `session.prompt()`. -7. Success + sample templates. - -### 2. Pairing Onboarding (Client / Mobile) - -1. User selects “Client”. -2. UI explains it connects to a trusted host. -3. User scans QR code shown on host device. -4. Client verifies connection with `global.health()`. -5. Client can now list sessions and monitor runs. - -### 3. Runtime Health & Recovery - -1. UI pings `global.health()`. -2. If unhealthy: - - Host: attempt restart via `createOpencode()`. - - Client: show reconnect + diagnostics. - -### 4. Quick Task Flow - -1. User types goal. -2. OpenWork generates plan (structured). -3. User approves. -4. Create session: `session.create()`. -5. Send prompt: `session.prompt()`. -6. Subscribe to events: `event.subscribe()`. -7. Render streaming output + steps. -8. Show artifacts. - -### 5. Guided Task Flow - -1. Wizard collects goal, constraints, outputs. -2. Plan preview with “risky step” highlights. -3. Run execution with progress UI. - -### 6. File-Driven Task Flow - -1. User attaches files. -2. OpenWork injects context into session. -3. Execute prompt. - -### 7. Permissions Flow (Any) - -1. Event indicates permission request. -2. UI modal shows request. -3. User chooses allow/deny. -4. UI calls `client.permission.reply({ requestID, reply })`. -5. Run continues or fails gracefully. - -### 8. Cancel / Abort - -1. User clicks “Stop”. -2. UI calls `client.session.abort({ sessionID })`. -3. UI marks run stopped. - -### 9. Summarize - -1. User taps “Summarize”. -2. UI calls `client.session.summarize({ sessionID })`. -3. Summary displayed as an artifact. - -### 10. 
Run History - -1. UI calls `session.list()`. -2. Tap a session to load `session.messages()`. -3. UI reconstructs plan and steps. - -### 11. File Explorer + Search - -1. User searches: `find.text()`. -2. Open file: `file.read()`. -3. Show changed files: `file.status()`. - -### 12. Templates - -1. Save a plan + prompt as a template. -2. Re-run template creates a new session. - -### 13. Multi-user (Future) - -- separate profiles -- separate allowed folders -- separate providers/keys - ---- - -## Security & Privacy - -- Local-first by default. -- No secrets in git. -- Use OS keychain for credentials. -- Clear, explicit permissions. -- Exportable audit logs. - ---- - -## Open Questions - -- Best packaging strategy for Host mode engine (bundled vs user-installed Node/runtime). -- Best remote transport for mobile client (LAN only vs optional tunnel). -- Scheduling API surface (native in OpenCode server vs OpenWork-managed scheduler). - ---- - -## Milestones - -### v0.1 — Engine + Client - -- Host mode: start OpenCode via `createOpencode()`. -- Client mode: connect via `createOpencodeClient()`. -- Health screen + basic sessions list. 
- -### v0.2 — Full Run Loop - -- create session -- send prompt -- stream events -- display step timeline -- permission prompts - -### v0.3 — Premium UX - -- micro-interactions and animations -- mobile layouts + gestures -- templates - -### v1.0 — Public Open Source Release - -- strong onboarding -- multi-device pairing -- audit/export -- docs + examples diff --git a/design.ts b/design.ts deleted file mode 100644 index d73efc7d8d..0000000000 --- a/design.ts +++ /dev/null @@ -1,720 +0,0 @@ -import React, { useState, useEffect, useRef } from 'react'; -import { - Command, - Shield, - Zap, - Layout, - Settings, - ChevronRight, - Play, - CheckCircle2, - Circle, - AlertCircle, - FileText, - X, - Terminal, - Smartphone, - HardDrive, - Cpu, - MoreHorizontal, - ArrowRight, - Clock, - Menu, - Download, - Folder, - Plus, - Trash2 -} from 'lucide-react'; - -// --- Mock Data & Types --- - -const MOCK_TEMPLATES = [ - { id: 1, title: "Analyze Q3 Revenue", description: "Read CSVs and generate a PDF report", icon: "bar-chart" }, - { id: 2, title: "Clean Desktop", description: "Organize screenshots into folders", icon: "folder" }, - { id: 3, title: "Summarize Meeting", description: "Process audio transcript", icon: "mic" }, -]; - -const MOCK_RECENT_SESSIONS = [ - { id: 101, title: "Update Dependencies", status: "completed", date: "2 mins ago" }, - { id: 102, title: "Fix CSS Bug", status: "failed", date: "1 hour ago" }, - { id: 103, title: "Deploy to Prod", status: "waiting", date: "Yesterday" }, -]; - -// --- Components --- - -const Button = ({ children, variant = "primary", className = "", onClick, disabled }) => { - const baseStyle = "px-4 py-2.5 rounded-xl font-medium transition-all duration-200 flex items-center justify-center gap-2 active:scale-95 text-sm"; - const variants = { - primary: "bg-white text-black hover:bg-gray-100 shadow-lg shadow-white/5", - secondary: "bg-zinc-800 text-zinc-100 hover:bg-zinc-700 border border-zinc-700/50", - ghost: "bg-transparent text-zinc-400 
hover:text-white hover:bg-zinc-800/50", - danger: "bg-red-500/10 text-red-400 hover:bg-red-500/20 border border-red-500/20", - outline: "border border-zinc-700 text-zinc-300 hover:border-zinc-500 bg-transparent", - }; - - return ( - - ); -}; - -const StatusBadge = ({ status }) => { - const styles = { - completed: "bg-emerald-500/10 text-emerald-400 border-emerald-500/20", - running: "bg-blue-500/10 text-blue-400 border-blue-500/20 animate-pulse", - waiting: "bg-amber-500/10 text-amber-400 border-amber-500/20", - failed: "bg-red-500/10 text-red-400 border-red-500/20", - stopped: "bg-zinc-500/10 text-zinc-400 border-zinc-500/20", - }; - - return ( - - - {status.charAt(0).toUpperCase() + status.slice(1)} - - ); -}; - -const Card = ({ children, className = "" }) => ( -
- {children} -
-); - -// --- Views --- - -const OnboardingView = ({ onComplete }) => { - const [step, setStep] = useState('mode-select'); // mode-select | working-dirs | connecting - const [mode, setMode] = useState(null); - const [folders, setFolders] = useState(['~/Documents/OpenWork']); - - const handleModeSelect = (selectedMode) => { - setMode(selectedMode); - if (selectedMode === 'host') { - setStep('working-dirs'); - } else { - startConnection(selectedMode); - } - }; - - const startConnection = (selectedMode) => { - setStep('connecting'); - // Simulate connection delay - setTimeout(() => { - onComplete(selectedMode); - }, 2000); - }; - - const addFolder = () => { - // Mock folder selection - setFolders([...folders, '~/Downloads/Project_' + Math.floor(Math.random() * 100)]); - }; - - const removeFolder = (index) => { - setFolders(folders.filter((_, i) => i !== index)); - }; - - if (step === 'connecting') { - return ( -
-
-
-
-
-
-
-
- -
-
-
-

- {mode === 'host' ? 'Starting OpenCode Engine...' : 'Searching for Host...'} -

-

- {mode === 'host' ? 'Initializing localhost:4096' : 'Verifying secure handshake'} -

-
-
-
- ); - } - - if (step === 'working-dirs') { - return ( -
-
- -
-
-
- -
-

Authorized Workspaces

-

- OpenWork runs locally. For your safety, please explicitly select which folders it is allowed to access. -

-
- -
- {folders.map((folder, idx) => ( -
-
- - {folder} -
- -
- ))} - - -
- -
- -

- You can always change these permissions later in Settings. -

-
-
-
- ); - } - - return ( -
-
- -
-
-
-
- -
-

OpenWork

-
-

- How would you like to run OpenWork today? -

-
- -
- - - -
-
-
- ); -}; - -const SessionView = ({ session, onBack }) => { - const [messages, setMessages] = useState([]); - const [steps, setSteps] = useState([]); - const [status, setStatus] = useState('idle'); // idle, planning, running, paused, completed - const [input, setInput] = useState(''); - const [permissionRequest, setPermissionRequest] = useState(null); - const [artifacts, setArtifacts] = useState([]); - - const messagesEndRef = useRef(null); - - // Auto-scroll - useEffect(() => { - messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); - }, [messages, steps]); - - // Simulate a run flow - const handleStart = () => { - if (!input.trim()) return; - - // 1. User Message - const userMsg = { id: 1, role: 'user', content: input }; - setMessages([userMsg]); - setStatus('planning'); - setInput(''); - - // 2. Simulate Planning (Delay) - setTimeout(() => { - const planMsg = { - id: 2, - role: 'assistant', - type: 'plan', - content: "I'll help you analyze that. Here is the plan:" - }; - setMessages(prev => [...prev, planMsg]); - - const initialSteps = [ - { id: 1, title: 'Read data files', status: 'pending' }, - { id: 2, title: 'Process revenue metrics', status: 'pending' }, - { id: 3, title: 'Generate PDF Report', status: 'pending' }, - ]; - setSteps(initialSteps); - setStatus('running'); - - // 3. Start Execution simulation - runStep(1, initialSteps); - }, 1500); - }; - - const runStep = (stepId, currentSteps) => { - // Update step to running - const updatedSteps = currentSteps.map(s => - s.id === stepId ? 
{ ...s, status: 'running' } : s - ); - setSteps(updatedSteps); - - // Simulate work time - setTimeout(() => { - if (stepId === 1) { - // PERMISSION TRAP - setPermissionRequest({ - id: 'perm_123', - type: 'filesystem_read', - scope: '~/Documents/Finance/Q3', - reason: 'I need to read the CSV files to analyze revenue.', - stepId: 1 - }); - setStatus('paused'); - } else if (stepId === 2) { - completeStep(stepId, updatedSteps); - runStep(3, updatedSteps); - } else if (stepId === 3) { - completeStep(stepId, updatedSteps); - finishSession(); - } - }, 2000); - }; - - const handlePermission = (decision) => { - const currentStepId = permissionRequest.stepId; - setPermissionRequest(null); - - if (decision === 'deny') { - setStatus('failed'); - setMessages(prev => [...prev, { id: 99, role: 'system', content: 'Permission denied. Task aborted.', type: 'error' }]); - const newSteps = steps.map(s => s.id === currentStepId ? { ...s, status: 'failed' } : s); - setSteps(newSteps); - } else { - // Allowed - setStatus('running'); - // Add system log - setMessages(prev => [...prev, { id: 98, role: 'system', content: `Permission granted: ${decision}`, type: 'audit' }]); - completeStep(currentStepId, steps); - // Move to next step - runStep(2, steps); - } - }; - - const completeStep = (id, currentSteps) => { - const newSteps = currentSteps.map(s => - s.id === id ? { ...s, status: 'completed' } : s - ); - setSteps(newSteps); - }; - - const finishSession = () => { - setStatus('completed'); - setArtifacts([{ id: 1, name: 'Q3_Revenue_Report.pdf', size: '1.2MB' }]); - setMessages(prev => [...prev, { id: 100, role: 'assistant', content: 'Task complete! I have generated the report below.' }]); - }; - - return ( -
- {/* Header */} -
-
- -
-

Session #104

-
- - {status.charAt(0).toUpperCase() + status.slice(1)} -
-
-
-
- -
-
- - {/* Main Content Area */} -
- - {/* Left: Chat & Stream */} -
-
- - {/* Empty State */} - {messages.length === 0 && ( -
-
- -
-

Ready to work

-

- Describe a task. I'll create a plan, ask for permissions when needed, and execute it. -

-
- )} - - {/* Message Stream */} - {messages.map((msg) => ( -
- {msg.type === 'audit' ? ( -
- {msg.content} -
- ) : msg.type === 'error' ? ( -
- {msg.content} -
- ) : ( -
- {msg.content} -
- )} -
- ))} - - {/* Artifacts */} - {artifacts.length > 0 && ( -
-
-
- -
-
-
{artifacts[0].name}
-
{artifacts[0].size}
-
-
- -
- )} - -
-
-
- - {/* Right: Plan & Context (Desktop) */} -
-
- Execution Plan - {steps.filter(s => s.status === 'completed').length}/{steps.length} -
-
- {steps.length === 0 ? ( -
Plan will appear here...
- ) : ( - steps.map((step, idx) => ( -
- {idx !== steps.length - 1 && ( -
- )} -
- {step.status === 'completed' ? : - step.status === 'running' ?
: - step.status === 'failed' ? : - } -
-
- {step.title} -
-
- )) - )} -
-
-
- - {/* Input Area */} -
-
- setInput(e.target.value)} - onKeyDown={(e) => e.key === 'Enter' && handleStart()} - placeholder={status === 'running' ? "Task is running..." : "Ask OpenWork to do something..."} - className="w-full bg-zinc-900 border border-zinc-800 rounded-2xl py-4 pl-5 pr-14 text-white placeholder-zinc-500 focus:outline-none focus:ring-1 focus:ring-zinc-600 focus:border-zinc-600 transition-all disabled:opacity-50" - /> - -
-
- - {/* Permission Modal Overlay */} - {permissionRequest && ( -
-
-
-
-
- -
-
-

Permission Required

-

OpenCode needs access to your file system to continue.

-
-
- -
-
Reason
-

"{permissionRequest.reason}"

- -
Scope
-
- - {permissionRequest.scope} -
-
- -
- -
- - -
-
-
-
-
- )} -
- ); -}; - -const DashboardView = ({ onStartTask, isHost }) => { - return ( -
- {/* Sidebar */} - - - {/* Main Content */} -
-
-

Good Morning

-
- -
-
- -
- - {/* Quick Action */} -
-
-
-
-

What should we do today?

-

OpenWork can read files, run scripts, and generate reports.

-
- -
-
-
- - {/* Templates */} -
-
-

Quick Start Templates

- -
-
- {MOCK_TEMPLATES.map((t) => ( - - ))} -
-
- - {/* Recent Activity */} -
-

Recent Sessions

-
- {MOCK_RECENT_SESSIONS.map((s, i) => ( -
-
-
- #{s.id} -
-
-
{s.title}
-
- {s.date} -
-
-
-
- - -
-
- ))} -
-
-
-
-
- ); -}; - -// --- Main App Controller --- - -export default function App() { - const [view, setView] = useState('onboarding'); // onboarding, dashboard, session - const [mode, setMode] = useState(null); // host, client - - const handleOnboardingComplete = (selectedMode) => { - setMode(selectedMode); - setView('dashboard'); - }; - - const handleStartTask = () => { - setView('session'); - }; - - const handleBackToDashboard = () => { - setView('dashboard'); - }; - - return ( -
- {view === 'onboarding' && } - {view === 'dashboard' && } - {view === 'session' && } -
- ); -} \ No newline at end of file diff --git a/docs/mcp-ui-control-profile.md b/docs/mcp-ui-control-profile.md new file mode 100644 index 0000000000..77069631ec --- /dev/null +++ b/docs/mcp-ui-control-profile.md @@ -0,0 +1,239 @@ +# Control OpenWork from any MCP client + +OpenWork exposes its UI as an MCP server so any MCP-capable app can read what's on screen and run actions — no DOM scraping, no coordinates, no accessibility hacks. + +## Why this exists + +Apps like HandsFree let people control their computers hands-free using AI. But generic computer-use flows (screenshot → click coordinate) are slow, fragile, and need a vision model for every step. + +OpenWork takes a different approach: the app itself tells you what actions are available, what the current state is, and lets you execute actions by name. The MCP server wraps that surface so any MCP client gets a first-class, semantic control experience out of the box. + +This means: + +- **HandsFree** can drive OpenWork sessions, composer, navigation, and transcript without guessing pixels. +- **OpenCode** can automate OpenWork as part of a larger coding workflow. +- **Claude Desktop, Codex, Cursor**, or any MCP-compatible tool can add OpenWork control with a single config line. +- Your own app can do the same. + +> Want to control OpenWork Cloud workers and server APIs instead of the desktop UI? Check out the **OpenWork Cloud MCP** (separate package, coming soon). + +## Quick start with HandsFree + +HandsFree auto-discovers the OpenWork MCP server when both apps are running on the same machine. No config needed. + +1. Launch **OpenWork** (desktop app). +2. Launch **HandsFree**. +3. Open the HandsFree connector panel — you should see **OpenWork** with a green "Connected" status and an action count. + +That's it. HandsFree can now list your sessions, read transcripts, type into the composer, send prompts, and navigate the app — all through MCP. 
+ +### What HandsFree can do once connected + +- `ui_snapshot` — see the current route, status, and available actions. +- `ui_list_actions` — get every action the app currently exposes (session controls, composer, navigation, etc.). +- `ui_execute_action` — run an action by ID, e.g. `session.create_task`, `composer.set_text`, `composer.send`. +- `ui_status` — check if OpenWork is running and the bridge is reachable. + +## Install + +```bash +npm install -g openwork-ui-mcp +``` + +Or run without installing: + +```bash +npx openwork-ui-mcp +``` + +> The package is [`openwork-ui-mcp` on npm](https://www.npmjs.com/package/openwork-ui-mcp). + +## Add to OpenCode + +Add the MCP server to your workspace or global `opencode.json`: + +```json +{ + "mcp": { + "openwork-ui": { + "type": "local", + "command": ["npx", "-y", "openwork-ui-mcp"], + "enabled": true + } + } +} +``` + +Then use the tools in any session: + +``` +> Use ui_snapshot to see what's on screen in OpenWork, then list the available sessions. +``` + +## Add to Claude Desktop or Codex + +Both use the same MCP config shape. Add to your `claude_desktop_config.json` or Codex MCP settings: + +```json +{ + "mcpServers": { + "openwork-ui": { + "command": "npx", + "args": ["-y", "openwork-ui-mcp"] + } + } +} +``` + +Restart the app. The four tools (`ui_status`, `ui_snapshot`, `ui_list_actions`, `ui_execute_action`) will appear in the tool list. 
+ +## Add to your own MCP client + +If you're building an app that speaks MCP, you can connect to the OpenWork UI server the same way: + +```js +import { Client } from "@modelcontextprotocol/sdk/client/index.js"; +import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"; + +const transport = new StdioClientTransport({ + command: "npx", + args: ["-y", "openwork-ui-mcp"], +}); +const client = new Client({ name: "my-app", version: "1.0.0" }); +await client.connect(transport); + +// Check if OpenWork is running +const status = await client.callTool({ name: "ui_status", arguments: {} }); +console.log(status); + +// See what actions are available +const actions = await client.callTool({ name: "ui_list_actions", arguments: {} }); +console.log(actions); + +// Type something into the composer +await client.callTool({ + name: "ui_execute_action", + arguments: { actionId: "composer.set_text", args: { text: "Hello from my app" } }, +}); +``` + +## Tool reference + +### `ui_status` + +Check if OpenWork is running and reachable. Returns connection status and app info. + +**No arguments.** + +Example response: + +``` +Connected to OpenWork +Bridge: http://127.0.0.1:52431 +Version: 1 +``` + +### `ui_snapshot` + +Get the current OpenWork UI state: active route, narration, visible actions, and status. Call this before acting to understand what the user sees. + +**No arguments.** + +Example response: + +``` +Route: /session/ses_abc123 +Status: ready +Narration: Ready. A controller can inspect and run visible actions. + +Actions (26): + session.create_task — Create a new task + session.list_sessions — List available sessions + composer.set_text — Type into the composer [text] + composer.send — Send the composer prompt + ... +``` + +### `ui_list_actions` + +List all UI control actions currently available. Each action has an `id` you can pass to `ui_execute_action`. + +**No arguments.** + +Returns the full list with labels, descriptions, and argument info. 
+ +### `ui_execute_action` + +Execute an OpenWork UI action by its id. + +| Argument | Type | Description | +|----------|------|-------------| +| `actionId` | string | The action id from `ui_list_actions`, e.g. `session.create_task` or `composer.set_text` | +| `args` | object (optional) | JSON arguments for the action, if required | + +Example — list sessions: + +```json +{ "actionId": "session.list_sessions" } +``` + +Example — type into the composer: + +```json +{ "actionId": "composer.set_text", "args": { "text": "Summarize this project" } } +``` + +Example — send the composer prompt: + +```json +{ "actionId": "composer.send" } +``` + +## Available actions + +The exact list depends on the current OpenWork route and state. Common actions include: + +| Action | Description | +|--------|-------------| +| `session.create_task` | Create a new session in the selected workspace | +| `session.list_sessions` | List sessions across workspaces | +| `session.open` | Navigate to a session by ID | +| `session.rename` | Rename a session | +| `session.delete` | Delete a session (requires confirmation) | +| `session.latest_message` | Read the latest message in the current session | +| `session.read_transcript` | Read the last N messages as text | +| `composer.set_text` | Type text into the composer (visible typing animation) | +| `composer.send` | Send the current draft | +| `composer.stop` | Stop a running session | +| `session.scroll_top` | Scroll to the top of the transcript | +| `session.scroll_bottom` | Scroll to the bottom | +| `route.session` | Navigate to the session view | +| `route.settings.*` | Navigate to various settings pages | +| `command_palette.open` | Open the command palette | +| `session.model_picker.open` | Open the model picker | +| `status.docs.open` | Open documentation | +| `status.settings.open` | Open settings from the status bar | + +## Requirements + +- **OpenWork desktop** must be running. 
The MCP server connects to OpenWork's local bridge which starts automatically when the desktop app launches. +- **macOS** is the primary supported platform. The bridge uses Electron IPC and writes a discovery file to `~/Library/Application Support/com.differentai.openwork/`. +- The MCP server runs as a **stdio** process — your MCP client spawns it and communicates over stdin/stdout. + +## How it works under the hood + +``` +┌─────────────┐ MCP stdio ┌──────────────────┐ HTTP localhost ┌──────────────┐ +│ MCP client │ ←────────────────→ │ openwork-ui-mcp │ ←───────────────────→ │ OpenWork app │ +│ (HandsFree, │ │ (Node.js) │ │ (Electron) │ +│ OpenCode, │ │ │ │ │ +│ Codex) │ └──────────────────┘ └──────────────┘ +└─────────────┘ +``` + +1. OpenWork desktop starts a private localhost HTTP bridge on a random port, protected by a bearer token. +2. It writes a discovery file with the port and token so `openwork-ui-mcp` can find it. +3. `openwork-ui-mcp` reads the discovery file, proxies MCP tool calls to the bridge, and returns structured results. +4. The bridge calls `window.__openworkControl` inside the Electron renderer to snapshot state and execute actions. + +The bridge and discovery file are implementation details — you never need to touch them directly. Just point your MCP client at `openwork-ui-mcp`. diff --git a/ee/LICENSE b/ee/LICENSE new file mode 100644 index 0000000000..b0f07cba77 --- /dev/null +++ b/ee/LICENSE @@ -0,0 +1,110 @@ +# Functional Source License, Version 1.1, MIT Future License + +## Abbreviation + +FSL-1.1-MIT + +## Notice + +Copyright 2026 Different AI Inc + +## Terms and Conditions + +### Licensor ("We") + +The party offering the Software under these Terms and Conditions. + +### The Software + +The "Software" is each version of the software that we make available under +these Terms and Conditions, as indicated by our inclusion of these Terms and +Conditions with the Software. 
+ +### License Grant + +Subject to your compliance with this License Grant and the Patents, +Redistribution and Trademark clauses below, we hereby grant you the right to +use, copy, modify, create derivative works, publicly perform, publicly display +and redistribute the Software for any Permitted Purpose identified below. + +### Permitted Purpose + +A Permitted Purpose is any purpose other than a Competing Use. A Competing Use +means making the Software available to others in a commercial product or +service that: + +1. substitutes for the Software; + +2. substitutes for any other product or service we offer using the Software + that exists as of the date we make the Software available; or + +3. offers the same or substantially similar functionality as the Software. + +Permitted Purposes specifically include using the Software: + +1. for your internal use and access; + +2. for non-commercial education; + +3. for non-commercial research; and + +4. in connection with professional services that you provide to a licensee + using the Software in accordance with these Terms and Conditions. + +### Patents + +To the extent your use for a Permitted Purpose would necessarily infringe our +patents, the license grant above includes a license under our patents. If you +make a claim against any party that the Software infringes or contributes to +the infringement of any patent, then your patent license to the Software ends +immediately. + +### Redistribution + +The Terms and Conditions apply to all copies, modifications and derivatives of +the Software. + +If you redistribute any copies, modifications or derivatives of the Software, +you must include a copy of or a link to these Terms and Conditions and not +remove any copyright notices provided in or with the Software. 
+ +### Disclaimer + +THE SOFTWARE IS PROVIDED "AS IS" AND WITHOUT WARRANTIES OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING WITHOUT LIMITATION WARRANTIES OF FITNESS FOR A PARTICULAR +PURPOSE, MERCHANTABILITY, TITLE OR NON-INFRINGEMENT. + +IN NO EVENT WILL WE HAVE ANY LIABILITY TO YOU ARISING OUT OF OR RELATED TO THE +SOFTWARE, INCLUDING INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES, +EVEN IF WE HAVE BEEN INFORMED OF THEIR POSSIBILITY IN ADVANCE. + +### Trademarks + +Except for displaying the License Details and identifying us as the origin of +the Software, you have no right under these Terms and Conditions to use our +trademarks, trade names, service marks or product names. + +## Grant of Future License + +We hereby irrevocably grant you an additional license to use the Software under +the MIT license that is effective on the second anniversary of the date we make +the Software available. On or after that date, you may use the Software under +the MIT license, in which case the following will apply: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ee/apps/den-api/.env.example b/ee/apps/den-api/.env.example new file mode 100644 index 0000000000..16a15d4e86 --- /dev/null +++ b/ee/apps/den-api/.env.example @@ -0,0 +1,29 @@ +PORT=8790 +CORS_ORIGINS=http://localhost:3000,http://localhost:3001 +DATABASE_URL=mysql://root:password@127.0.0.1:3306/den +BETTER_AUTH_SECRET=replace-with-32-plus-character-secret +# Required dedicated DB encryption key for encrypted columns. Minimum 32 chars. +# Generate one with: openssl rand -base64 128 +DEN_DB_ENCRYPTION_KEY= +BETTER_AUTH_URL=http://localhost:8790 +DEN_BETTER_AUTH_TRUSTED_ORIGINS=http://localhost:3000,http://localhost:3001 +LOOPS_TRANSACTIONAL_ID_DEN_VERIFY_EMAIL=replace-with-loops-template-id +LOOPS_TRANSACTIONAL_ID_DEN_ORG_INVITE_EMAIL=replace-with-loops-template-id +PROVISIONER_MODE=daytona +WORKER_URL_TEMPLATE=https://workers.local/{workerId} +WORKER_ACTIVITY_BASE_URL=http://localhost:8790 +RENDER_API_KEY= +RENDER_OWNER_ID= +RENDER_WORKER_PUBLIC_DOMAIN_SUFFIX= +VERCEL_TOKEN= +VERCEL_DNS_DOMAIN= +POLAR_FEATURE_GATE_ENABLED=false +POLAR_API_BASE= +POLAR_ACCESS_TOKEN= +POLAR_PRODUCT_ID= +POLAR_BENEFIT_ID= +POLAR_SUCCESS_URL= +POLAR_RETURN_URL= +DAYTONA_API_KEY= +DAYTONA_API_URL=https://app.daytona.io/api +OPENWORK_DEV_MODE=1 diff --git a/ee/apps/den-api/README.md b/ee/apps/den-api/README.md new file mode 100644 index 0000000000..2f4433542f --- /dev/null +++ b/ee/apps/den-api/README.md @@ -0,0 +1,61 @@ +# Den API + +Hono-based Den control plane implementation (`den-api`, formerly `den-controller`). + +This package is the active Den control plane implementation. 
+ +It carries the full migrated Den API route surface in a foldered Hono structure so agents can navigate one area at a time without scanning the whole service. + +## Quick start + +```bash +pnpm --filter @openwork-ee/den-api dev:local +``` + +## Local demo org seed + +With a local Den MySQL database running, seed a demo organization: + +```bash +pnpm --filter @openwork-ee/den-api seed:demo-org +``` + +This creates `Acme Robotics` with demo users, teams, pending invites, and an imported Anthropic Knowledge Work Plugins marketplace. It is guarded by `OPENWORK_DEV_MODE=1`, defaults to the local Den DB URL, and does not create workers or active external integrations. + +Default owner login: `alex@acme.test` / `OpenWorkDemo123!`. + +## Current routes + +- `GET /` -> `302 https://openworklabs.com` +- `GET /health` +- Better Auth mount at `/api/auth/*` +- desktop handoff routes under `/v1/auth/*` +- current user routes under `/v1/me*` +- organization routes under `/v1/orgs*` +- admin routes under `/v1/admin*` +- worker lifecycle and billing routes under `/v1/workers*` + +## Folder map + +- `src/routes/auth/`: Better Auth mount + desktop handoff endpoints +- `src/routes/me/`: current user and current user's org resolution routes +- `src/routes/org/`: organization CRUD-ish surfaces, split by area +- `src/routes/admin/`: admin-only reporting endpoints +- `src/routes/workers/`: worker lifecycle, billing, runtime, and heartbeat endpoints +- `src/middleware/`: reusable Hono middleware for auth context, org context, teams, and validation + +Each major folder also has its own `README.md` so future agents can inspect one area in isolation. + +## TypeID validation + +- Shared Den TypeID validation lives in `ee/packages/utils/src/typeid.ts`. +- Use `typeId.schema("...")` or the compatibility helpers like `normalizeDenTypeId("...", value)` when an endpoint accepts or returns a Den TypeID. 
+- `ee/apps/den-api/src/openapi.ts` exposes `denTypeIdSchema(...)` so path params, request bodies, and response fields all share the same validation rules and Swagger examples. +- Swagger now documents Den IDs with their required prefix and fixed 26-character TypeID suffix, so invalid IDs fail request validation before route logic runs. + +## Migration approach + +1. Keep `den-api` (formerly `den-controller`) as the source of truth for Den control-plane behavior. +2. Add endpoints in focused Hono route groups one surface at a time. +3. Reuse shared middleware and Zod validators instead of duplicating request/session/org plumbing. +4. Leave a short README in each route area when the structure changes so later agents can recover context fast. diff --git a/ee/apps/den-api/package.json b/ee/apps/den-api/package.json new file mode 100644 index 0000000000..55beb8332b --- /dev/null +++ b/ee/apps/den-api/package.json @@ -0,0 +1,42 @@ +{ + "name": "@openwork-ee/den-api", + "private": true, + "type": "module", + "scripts": { + "dev": "OPENWORK_DEV_MODE=1 tsx watch src/server.ts", + "dev:local": "sh -lc 'OPENWORK_DEV_MODE=1 PORT=${DEN_API_PORT:-8790} tsx watch src/server.ts'", + "build": "node ./scripts/build.mjs", + "build:den-db": "pnpm --filter @openwork-ee/den-db build", + "seed:demo-org": "pnpm run build:den-db && sh -lc 'DEN_WEB_PORT=${DEN_WEB_PORT:-3005}; OPENWORK_DEV_MODE=${OPENWORK_DEV_MODE:-1} DATABASE_URL=${DATABASE_URL:-mysql://root:password@127.0.0.1:3306/openwork_den} DEN_DB_ENCRYPTION_KEY=${DEN_DB_ENCRYPTION_KEY:-local-dev-db-encryption-key-please-change-1234567890} BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET:-local-dev-secret-not-for-production-use!!} BETTER_AUTH_URL=${BETTER_AUTH_URL:-http://localhost:$DEN_WEB_PORT} tsx scripts/seed-demo-org.ts'", + "start": "node dist/server.js" + }, + "dependencies": { + "@better-auth/api-key": "^1.5.6", + "@better-auth/oauth-provider": "^1.5.6", + "@daytonaio/sdk": "^0.150.0", + "@hono/mcp": "^0.2.5", + "@hono/node-server": 
"^1.13.8", + "@hono/standard-validator": "^0.2.2", + "@hono/swagger-ui": "^0.6.1", + "@modelcontextprotocol/sdk": "^1.29.0", + "@openwork-ee/den-db": "workspace:*", + "@openwork-ee/utils": "workspace:*", + "@openwork/types": "workspace:*", + "@standard-community/standard-json": "^0.3.5", + "@standard-community/standard-openapi": "^0.2.9", + "@standard-schema/spec": "^1.1.0", + "better-auth": "^1.5.6", + "better-call": "^1.3.2", + "dotenv": "^16.4.5", + "hono": "^4.7.2", + "hono-openapi": "^1.3.0", + "openapi-types": "^12.1.3", + "zod": "^4.3.6" + }, + "devDependencies": { + "@types/json-schema": "^7.0.15", + "@types/node": "^20.11.30", + "tsx": "^4.15.7", + "typescript": "^5.5.4" + } +} diff --git a/ee/apps/den-api/scripts/build.mjs b/ee/apps/den-api/scripts/build.mjs new file mode 100644 index 0000000000..2e9bb5b775 --- /dev/null +++ b/ee/apps/den-api/scripts/build.mjs @@ -0,0 +1,59 @@ +import { spawnSync } from "node:child_process" +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs" +import path from "node:path" +import { fileURLToPath } from "node:url" + +const scriptDir = path.dirname(fileURLToPath(import.meta.url)) +const serviceDir = path.resolve(scriptDir, "..") +const repoRoot = path.resolve(serviceDir, "..", "..", "..") +const desktopPackagePath = path.join(repoRoot, "apps", "desktop", "package.json") +const generatedVersionPath = path.join(serviceDir, "src", "generated", "app-version.ts") +const pnpmCommand = process.platform === "win32" ? "pnpm.cmd" : "pnpm" +const fallbackAppVersion = "0.0.0" + +function readDesktopVersion() { + if (!existsSync(desktopPackagePath)) { + // The Den API is built inside contexts (e.g. the Docker image used by + // `packaging/docker/den-dev-up.sh`) that intentionally do not ship the + // Tauri desktop sources. Falling back lets the container image build + // without copying unrelated packages; consumers that need the real + // version can override via DEN_API_LATEST_APP_VERSION. 
+ console.warn(`Desktop package.json not found at ${desktopPackagePath}; using fallback version ${fallbackAppVersion}`) + return fallbackAppVersion + } + + const packageJson = JSON.parse(readFileSync(desktopPackagePath, "utf8")) + const version = packageJson.version?.trim() + + if (!version) { + throw new Error(`Desktop version missing in ${desktopPackagePath}`) + } + + return version +} + +function writeGeneratedVersionFile(latestAppVersion) { + mkdirSync(path.dirname(generatedVersionPath), { recursive: true }) + writeFileSync( + generatedVersionPath, + `export const BUILD_LATEST_APP_VERSION = ${JSON.stringify(latestAppVersion)} as const\n`, + ) +} + +function run(command, args) { + const result = spawnSync(command, args, { + cwd: serviceDir, + env: process.env, + stdio: "inherit", + }) + + if (result.status !== 0) { + process.exit(result.status ?? 1) + } +} + +process.env.DEN_API_LATEST_APP_VERSION = process.env.DEN_API_LATEST_APP_VERSION || readDesktopVersion() +writeGeneratedVersionFile(process.env.DEN_API_LATEST_APP_VERSION) + +run(pnpmCommand, ["run", "build:den-db"]) +run(pnpmCommand, ["exec", "tsc", "-p", "tsconfig.json"]) diff --git a/ee/apps/den-api/scripts/seed-demo-org.ts b/ee/apps/den-api/scripts/seed-demo-org.ts new file mode 100644 index 0000000000..574e367c93 --- /dev/null +++ b/ee/apps/den-api/scripts/seed-demo-org.ts @@ -0,0 +1,1041 @@ +import { and, eq, inArray } from "@openwork-ee/den-db/drizzle" +import { + AuthUserTable, + ConfigObjectAccessGrantTable, + ConfigObjectTable, + ConfigObjectVersionTable, + InvitationTable, + MarketplaceAccessGrantTable, + MarketplacePluginTable, + MarketplaceTable, + MemberTable, + OrganizationTable, + PluginAccessGrantTable, + PluginConfigObjectTable, + PluginTable, + TeamMemberTable, + TeamTable, +} from "@openwork-ee/den-db/schema" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import { auth } from "../src/auth.js" +import { db } from "../src/db.js" +import { env } from "../src/env.js" +import { 
seedDefaultOrganizationRoles } from "../src/orgs.js" + +const RESET_MODE = process.argv.includes("--reset") + +type UserId = typeof AuthUserTable.$inferSelect.id +type OrganizationId = typeof OrganizationTable.$inferSelect.id +type MemberId = typeof MemberTable.$inferSelect.id +type TeamId = typeof TeamTable.$inferSelect.id +type MarketplaceId = typeof MarketplaceTable.$inferSelect.id +type PluginId = typeof PluginTable.$inferSelect.id +type ConfigObjectId = typeof ConfigObjectTable.$inferSelect.id +type ConfigObjectType = typeof ConfigObjectTable.$inferSelect.objectType + +type DemoPerson = { + email: string + name: string + role: "admin" | "member" | "owner" + teams: string[] +} + +type DemoPlugin = { + description: string + orgWide?: boolean + slug: string + teamAccess: string[] +} + +type GithubContentEntry = { + download_url: string | null + name: string + path: string + type: "dir" | "file" | string +} + +type PluginContentObject = { + description: string | null + normalizedPayloadJson?: Record<string, unknown> + objectType: ConfigObjectType + path: string + rawSourceText: string + title: string +} + +const DEMO_ORG_NAME = process.env.DEN_DEMO_ORG_NAME?.trim() || "Acme Robotics" +const DEMO_ORG_SLUG = process.env.DEN_DEMO_ORG_SLUG?.trim() || "acme-robotics-demo" +const DEMO_EMAIL_DOMAIN = process.env.DEN_DEMO_EMAIL_DOMAIN?.trim() || "acme.test" +const DEMO_OWNER_EMAIL = process.env.DEN_DEMO_OWNER_EMAIL?.trim() || `alex@${DEMO_EMAIL_DOMAIN}` +const DEMO_OWNER_PASSWORD = process.env.DEN_DEMO_OWNER_PASSWORD?.trim() || "OpenWorkDemo123!" +const SHOULD_FETCH_GITHUB = (process.env.DEN_DEMO_SEED_FETCH_GITHUB ?? 
"1").trim() !== "0" +const GITHUB_TOKEN = process.env.GITHUB_TOKEN?.trim() +const GITHUB_REPO = "anthropics/knowledge-work-plugins" +const GITHUB_REF = process.env.DEN_DEMO_PLUGIN_REF?.trim() || "main" +const GITHUB_RAW_BASE = `https://raw.githubusercontent.com/${GITHUB_REPO}/${GITHUB_REF}` +const GITHUB_API_BASE = `https://api.github.com/repos/${GITHUB_REPO}/contents` +const SOURCE_REVISION_REF = `${GITHUB_REPO}@${GITHUB_REF}` +const MAX_RAW_SOURCE_CHARS = 18_000 + +const demoPeople: DemoPerson[] = [ + { email: DEMO_OWNER_EMAIL, name: "Alex Chen", role: "owner", teams: ["Leadership", "Product"] }, + { email: `priya@${DEMO_EMAIL_DOMAIN}`, name: "Priya Shah", role: "admin", teams: ["Leadership", "Engineering"] }, + { email: `mateo@${DEMO_EMAIL_DOMAIN}`, name: "Mateo Rivera", role: "admin", teams: ["Leadership", "Sales"] }, + { email: `morgan@${DEMO_EMAIL_DOMAIN}`, name: "Morgan Lee", role: "member", teams: ["Product", "Design"] }, + { email: `nora@${DEMO_EMAIL_DOMAIN}`, name: "Nora Patel", role: "member", teams: ["Product", "Data"] }, + { email: `jamal@${DEMO_EMAIL_DOMAIN}`, name: "Jamal Brooks", role: "member", teams: ["Engineering"] }, + { email: `sofia@${DEMO_EMAIL_DOMAIN}`, name: "Sofia Garcia", role: "member", teams: ["Engineering", "Operations"] }, + { email: `ivy@${DEMO_EMAIL_DOMAIN}`, name: "Ivy Nguyen", role: "member", teams: ["Design"] }, + { email: `liam@${DEMO_EMAIL_DOMAIN}`, name: "Liam O'Connor", role: "member", teams: ["Sales"] }, + { email: `olivia@${DEMO_EMAIL_DOMAIN}`, name: "Olivia Martin", role: "member", teams: ["Sales", "Marketing"] }, + { email: `harper@${DEMO_EMAIL_DOMAIN}`, name: "Harper Wilson", role: "member", teams: ["Support"] }, + { email: `kenji@${DEMO_EMAIL_DOMAIN}`, name: "Kenji Tanaka", role: "member", teams: ["Support", "Operations"] }, + { email: `zoe@${DEMO_EMAIL_DOMAIN}`, name: "Zoe Kim", role: "member", teams: ["Marketing"] }, + { email: `sam@${DEMO_EMAIL_DOMAIN}`, name: "Sam Okafor", role: "member", teams: ["Finance"] }, + { 
email: `maya@${DEMO_EMAIL_DOMAIN}`, name: "Maya Singh", role: "member", teams: ["Legal"] }, + { email: `ezra@${DEMO_EMAIL_DOMAIN}`, name: "Ezra Cohen", role: "member", teams: ["Data", "Engineering"] }, + { email: `camila@${DEMO_EMAIL_DOMAIN}`, name: "Camila Torres", role: "member", teams: ["Human Resources", "Operations"] }, +] + +const pendingInvites = [ + { email: `riley@${DEMO_EMAIL_DOMAIN}`, role: "member", team: "Engineering" }, + { email: `taylor@${DEMO_EMAIL_DOMAIN}`, role: "member", team: "Sales" }, + { email: `jordan@${DEMO_EMAIL_DOMAIN}`, role: "admin", team: "Leadership" }, +] + +const demoPlugins: DemoPlugin[] = [ + { + description: "Manage tasks, plan your day, and build up memory of important context about your work. Syncs with your calendar, email, and chat to keep everything organized and on track.", + orgWide: true, + slug: "productivity", + teamAccess: ["Leadership", "Operations"], + }, + { + description: "Search across all of your company's tools in one place. Find anything across email, chat, documents, and wikis without switching between apps.", + orgWide: true, + slug: "enterprise-search", + teamAccess: ["Leadership", "Product", "Support"], + }, + { + description: "Prospect, craft outreach, and build deal strategy faster. Prep for calls, manage your pipeline, and write personalized messaging that moves deals forward.", + slug: "sales", + teamAccess: ["Sales", "Marketing"], + }, + { + description: "Triage tickets, draft responses, package escalations, research customer context, and turn resolved issues into knowledge base articles.", + slug: "customer-support", + teamAccess: ["Support", "Product"], + }, + { + description: "Write feature specs, plan roadmaps, and synthesize user research faster. 
Keep stakeholders updated and stay ahead of the competitive landscape.", + slug: "product-management", + teamAccess: ["Product", "Design", "Engineering"], + }, + { + description: "Create content, plan campaigns, and analyze performance across marketing channels. Maintain brand voice consistency, track competitors, and report on what's working.", + slug: "marketing", + teamAccess: ["Marketing", "Sales"], + }, + { + description: "Speed up contract review, NDA triage, and compliance workflows for in-house legal teams. Draft legal briefs, organize precedent research, and manage institutional knowledge.", + slug: "legal", + teamAccess: ["Legal", "Leadership"], + }, + { + description: "Streamline finance and accounting workflows, from journal entries and reconciliation to financial statements and variance analysis.", + slug: "finance", + teamAccess: ["Finance", "Leadership"], + }, + { + description: "Write SQL, explore datasets, and generate insights faster. Build visualizations and dashboards, and turn raw data into clear stories for stakeholders.", + slug: "data", + teamAccess: ["Data", "Product", "Engineering"], + }, + { + description: "Streamline engineering workflows — standups, code review, architecture decisions, incident response, and technical documentation.", + slug: "engineering", + teamAccess: ["Engineering", "Product"], + }, + { + description: "Accelerate design workflows — critique, design system management, UX writing, accessibility audits, research synthesis, and dev handoff.", + slug: "design", + teamAccess: ["Design", "Product"], + }, + { + description: "Optimize business operations — vendor management, process documentation, change management, capacity planning, and compliance tracking.", + slug: "operations", + teamAccess: ["Operations", "Finance", "Human Resources"], + }, + { + description: "Streamline people operations — recruiting, onboarding, performance reviews, compensation analysis, and policy guidance.", + slug: "human-resources", + 
teamAccess: ["Human Resources", "Leadership"], + }, + { + description: "View, annotate, and sign PDFs in a live interactive viewer for contracts, forms, and approvals.", + slug: "pdf-viewer", + teamAccess: ["Legal", "Finance", "Operations"], + }, +] + +function assertSafeDevTarget() { + if (!env.devMode) { + throw new Error("Refusing to seed demo data unless OPENWORK_DEV_MODE=1.") + } + if (env.dbMode !== "mysql") { + throw new Error(`Refusing to seed demo data into DB_MODE=${env.dbMode}; use local MySQL dev mode.`) + } + + const parsed = env.databaseUrl ? new URL(env.databaseUrl) : null + const host = parsed?.hostname ?? "" + const allowNonLocal = process.env.DEN_DEMO_SEED_ALLOW_NONLOCAL === "1" + const localHosts = new Set(["127.0.0.1", "localhost", "mysql"]) + if (!allowNonLocal && !localHosts.has(host)) { + throw new Error(`Refusing to seed non-local database host '${host}'. Set DEN_DEMO_SEED_ALLOW_NONLOCAL=1 to override.`) + } +} + +function githubHeaders() { + return { + Accept: "application/vnd.github+json", + "User-Agent": "openwork-den-demo-seed", + ...(GITHUB_TOKEN ? { Authorization: `Bearer ${GITHUB_TOKEN}` } : {}), + } +} + +async function fetchJson<T>(url: string): Promise<T | null> { + if (!SHOULD_FETCH_GITHUB) return null + try { + const response = await fetch(url, { headers: githubHeaders() }) + if (!response.ok) return null + return await response.json() as T + } catch { + return null + } +} + +async function fetchText(url: string): Promise<string | null> { + if (!SHOULD_FETCH_GITHUB) return null + try { + const response = await fetch(url, { headers: githubHeaders() }) + if (!response.ok) return null + return await response.text() + } catch { + return null + } +} + +function trimForEncryptedText(value: string) { + if (value.length <= MAX_RAW_SOURCE_CHARS) return value + return `${value.slice(0, MAX_RAW_SOURCE_CHARS)}\n\n<truncated>` +} + +function fileNameFromPath(path: string) { + return path.split("/").pop() ?? 
path +} + +function extensionFromPath(path: string) { + const fileName = fileNameFromPath(path) + const dotIndex = fileName.lastIndexOf(".") + return dotIndex >= 0 ? fileName.slice(dotIndex + 1) : null +} + +function titleFromPath(path: string) { + const fileName = fileNameFromPath(path).replace(/\.[^.]+$/, "") + return fileName + .split(/[-_\s]+/g) + .filter(Boolean) + .map((part) => part.charAt(0).toUpperCase() + part.slice(1)) + .join(" ") || path +} + +function deriveSearchText(input: { description?: string | null; rawSourceText?: string | null; title: string }) { + return [input.title, input.description, input.rawSourceText].filter(Boolean).join("\n") || null +} + +async function ensureSignedInOwnerUser() { + const existing = await db + .select() + .from(AuthUserTable) + .where(eq(AuthUserTable.email, DEMO_OWNER_EMAIL.toLowerCase())) + .limit(1) + + if (!existing[0]) { + await (auth.api as unknown as { + signUpEmail(input: { body: { email: string; name: string; password: string } }): Promise<unknown> + }).signUpEmail({ + body: { + email: DEMO_OWNER_EMAIL.toLowerCase(), + name: demoPeople[0]?.name ?? "Demo Owner", + password: DEMO_OWNER_PASSWORD, + }, + }) + } + + const rows = await db + .select() + .from(AuthUserTable) + .where(eq(AuthUserTable.email, DEMO_OWNER_EMAIL.toLowerCase())) + .limit(1) + const user = rows[0] + if (!user) throw new Error(`Failed to create demo owner ${DEMO_OWNER_EMAIL}.`) + + await db + .update(AuthUserTable) + .set({ emailVerified: true, name: demoPeople[0]?.name ?? 
user.name, updatedAt: new Date() }) + .where(eq(AuthUserTable.id, user.id)) + + return user.id +} + +async function ensureDisplayUser(person: DemoPerson): Promise<UserId> { + if (person.email.toLowerCase() === DEMO_OWNER_EMAIL.toLowerCase()) { + return ensureSignedInOwnerUser() + } + + const email = person.email.toLowerCase() + const existing = await db.select().from(AuthUserTable).where(eq(AuthUserTable.email, email)).limit(1) + if (existing[0]) { + await db + .update(AuthUserTable) + .set({ emailVerified: true, name: person.name, updatedAt: new Date() }) + .where(eq(AuthUserTable.id, existing[0].id)) + return existing[0].id + } + + const id = createDenTypeId("user") + const now = new Date() + await db.insert(AuthUserTable).values({ + createdAt: now, + email, + emailVerified: true, + id, + image: null, + name: person.name, + updatedAt: now, + }) + return id +} + +async function ensureOrganization(ownerUserId: UserId): Promise<OrganizationId> { + const existing = await db.select().from(OrganizationTable).where(eq(OrganizationTable.slug, DEMO_ORG_SLUG)).limit(1) + const metadata = { + demoSeed: { + source: "den-api seed:demo-org", + updatedAt: new Date().toISOString(), + }, + limits: { + members: 100, + workers: 0, + }, + } + + if (existing[0]) { + await db + .update(OrganizationTable) + .set({ + allowedEmailDomains: [DEMO_EMAIL_DOMAIN], + metadata, + name: DEMO_ORG_NAME, + updatedAt: new Date(), + }) + .where(eq(OrganizationTable.id, existing[0].id)) + await seedDefaultOrganizationRoles(existing[0].id) + await ensureMember(existing[0].id, ownerUserId, "owner") + return existing[0].id + } + + const id = createDenTypeId("organization") + await db.insert(OrganizationTable).values({ + allowedEmailDomains: [DEMO_EMAIL_DOMAIN], + id, + logo: null, + metadata, + name: DEMO_ORG_NAME, + slug: DEMO_ORG_SLUG, + }) + await seedDefaultOrganizationRoles(id) + await ensureMember(id, ownerUserId, "owner") + return id +} + +async function ensureMember(organizationId: OrganizationId, userId: UserId, role: 
DemoPerson["role"]): Promise { + const existing = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.organizationId, organizationId), eq(MemberTable.userId, userId))) + .limit(1) + + if (existing[0]) { + await db.update(MemberTable).set({ role }).where(eq(MemberTable.id, existing[0].id)) + return existing[0].id + } + + const id = createDenTypeId("member") + await db.insert(MemberTable).values({ id, organizationId, role, userId }) + return id +} + +async function ensureTeam(organizationId: OrganizationId, name: string): Promise { + const existing = await db + .select() + .from(TeamTable) + .where(and(eq(TeamTable.organizationId, organizationId), eq(TeamTable.name, name))) + .limit(1) + if (existing[0]) return existing[0].id + + const id = createDenTypeId("team") + await db.insert(TeamTable).values({ id, name, organizationId }) + return id +} + +async function ensureTeamMember(teamId: TeamId, orgMembershipId: MemberId) { + const existing = await db + .select() + .from(TeamMemberTable) + .where(and(eq(TeamMemberTable.teamId, teamId), eq(TeamMemberTable.orgMembershipId, orgMembershipId))) + .limit(1) + if (existing[0]) return existing[0].id + + const id = createDenTypeId("teamMember") + await db.insert(TeamMemberTable).values({ id, orgMembershipId, teamId }) + return id +} + +async function ensureInvitation(input: { + email: string + inviterId: UserId + organizationId: OrganizationId + role: string + teamId: TeamId | null +}) { + const email = input.email.toLowerCase() + const expiresAt = new Date(Date.now() + 1000 * 60 * 60 * 24 * 14) + const existing = await db + .select() + .from(InvitationTable) + .where(and(eq(InvitationTable.organizationId, input.organizationId), eq(InvitationTable.email, email))) + .limit(1) + + if (existing[0]) { + await db + .update(InvitationTable) + .set({ expiresAt, inviterId: input.inviterId, role: input.role, status: "pending", teamId: input.teamId }) + .where(eq(InvitationTable.id, existing[0].id)) + return 
existing[0].id + } + + const id = createDenTypeId("invitation") + await db.insert(InvitationTable).values({ + email, + expiresAt, + id, + inviterId: input.inviterId, + organizationId: input.organizationId, + role: input.role, + status: "pending", + teamId: input.teamId, + }) + return id +} + +async function ensureMarketplace(input: { createdByOrgMembershipId: MemberId; organizationId: OrganizationId }): Promise { + const name = "Anthropic Knowledge Work Plugins" + const description = `Demo marketplace seeded from ${GITHUB_REPO}. Plugins are imported into Den DB for local demos; no external integrations are connected.` + const existing = await db + .select() + .from(MarketplaceTable) + .where(and(eq(MarketplaceTable.organizationId, input.organizationId), eq(MarketplaceTable.name, name))) + .limit(1) + + if (existing[0]) { + await db + .update(MarketplaceTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, deletedAt: null, description, status: "active", updatedAt: new Date() }) + .where(eq(MarketplaceTable.id, existing[0].id)) + await ensureMarketplaceAccessGrant({ ...input, marketplaceId: existing[0].id, role: "viewer" }) + return existing[0].id + } + + const id = createDenTypeId("marketplace") + await db.insert(MarketplaceTable).values({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + deletedAt: null, + description, + id, + name, + organizationId: input.organizationId, + status: "active", + }) + await ensureMarketplaceAccessGrant({ ...input, marketplaceId: id, role: "viewer" }) + return id +} + +async function ensureMarketplaceAccessGrant(input: { + createdByOrgMembershipId: MemberId + marketplaceId: MarketplaceId + organizationId: OrganizationId + role: "manager" | "viewer" +}) { + const existing = await db + .select() + .from(MarketplaceAccessGrantTable) + .where(and(eq(MarketplaceAccessGrantTable.marketplaceId, input.marketplaceId), eq(MarketplaceAccessGrantTable.orgWide, true))) + .limit(1) + if (existing[0]) { + await db + 
.update(MarketplaceAccessGrantTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, orgWide: true, removedAt: null, role: input.role }) + .where(eq(MarketplaceAccessGrantTable.id, existing[0].id)) + return existing[0].id + } + + const id = createDenTypeId("marketplaceAccessGrant") + await db.insert(MarketplaceAccessGrantTable).values({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + id, + marketplaceId: input.marketplaceId, + organizationId: input.organizationId, + orgMembershipId: null, + orgWide: true, + role: input.role, + teamId: null, + }) + return id +} + +async function ensurePlugin(input: { + createdByOrgMembershipId: MemberId + marketplaceId: MarketplaceId + organizationId: OrganizationId + plugin: DemoPlugin +}): Promise { + const existing = await db + .select() + .from(PluginTable) + .where(and(eq(PluginTable.organizationId, input.organizationId), eq(PluginTable.name, input.plugin.slug))) + .limit(1) + + const description = `${input.plugin.description}\n\nSource: https://github.com/${GITHUB_REPO}/tree/${GITHUB_REF}/${input.plugin.slug}` + let pluginId: PluginId + if (existing[0]) { + pluginId = existing[0].id + await db + .update(PluginTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, deletedAt: null, description, name: input.plugin.slug, status: "active", updatedAt: new Date() }) + .where(eq(PluginTable.id, pluginId)) + } else { + pluginId = createDenTypeId("plugin") + await db.insert(PluginTable).values({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + deletedAt: null, + description, + id: pluginId, + name: input.plugin.slug, + organizationId: input.organizationId, + status: "active", + }) + } + + await ensureMarketplacePlugin({ ...input, pluginId }) + return pluginId +} + +async function ensureMarketplacePlugin(input: { + createdByOrgMembershipId: MemberId + marketplaceId: MarketplaceId + organizationId: OrganizationId + pluginId: PluginId +}) { + const existing = await db + 
.select() + .from(MarketplacePluginTable) + .where(and(eq(MarketplacePluginTable.marketplaceId, input.marketplaceId), eq(MarketplacePluginTable.pluginId, input.pluginId))) + .limit(1) + + if (existing[0]) { + await db + .update(MarketplacePluginTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, membershipSource: "system", removedAt: null }) + .where(eq(MarketplacePluginTable.id, existing[0].id)) + return existing[0].id + } + + const id = createDenTypeId("marketplacePlugin") + await db.insert(MarketplacePluginTable).values({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + id, + marketplaceId: input.marketplaceId, + membershipSource: "system", + organizationId: input.organizationId, + pluginId: input.pluginId, + removedAt: null, + }) + return id +} + +async function ensurePluginAccessGrant(input: { + createdByOrgMembershipId: MemberId + organizationId: OrganizationId + pluginId: PluginId + role: "editor" | "manager" | "viewer" + teamId?: TeamId | null + orgWide?: boolean +}) { + const existing = await db + .select() + .from(PluginAccessGrantTable) + .where(and( + eq(PluginAccessGrantTable.pluginId, input.pluginId), + input.teamId ? eq(PluginAccessGrantTable.teamId, input.teamId) : eq(PluginAccessGrantTable.orgWide, true), + )) + .limit(1) + + if (existing[0]) { + await db + .update(PluginAccessGrantTable) + .set({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + orgMembershipId: null, + orgWide: input.orgWide ?? !input.teamId, + removedAt: null, + role: input.role, + teamId: input.teamId ?? null, + }) + .where(eq(PluginAccessGrantTable.id, existing[0].id)) + return existing[0].id + } + + const id = createDenTypeId("pluginAccessGrant") + await db.insert(PluginAccessGrantTable).values({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + id, + organizationId: input.organizationId, + orgMembershipId: null, + orgWide: input.orgWide ?? 
!input.teamId, + pluginId: input.pluginId, + role: input.role, + teamId: input.teamId ?? null, + }) + return id +} + +async function ensureConfigObject(input: { + createdByOrgMembershipId: MemberId + object: PluginContentObject + organizationId: OrganizationId + pluginId: PluginId +}) { + const currentFileName = fileNameFromPath(input.object.path) + const currentFileExtension = extensionFromPath(input.object.path) + const rawSourceText = trimForEncryptedText(input.object.rawSourceText) + const searchText = deriveSearchText({ description: input.object.description, rawSourceText, title: input.object.title }) + + const existing = await db + .select() + .from(ConfigObjectTable) + .where(and(eq(ConfigObjectTable.organizationId, input.organizationId), eq(ConfigObjectTable.currentRelativePath, input.object.path))) + .limit(1) + + let configObjectId: ConfigObjectId + if (existing[0]) { + configObjectId = existing[0].id + await db + .update(ConfigObjectTable) + .set({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + currentFileExtension, + currentFileName, + currentRelativePath: input.object.path, + deletedAt: null, + description: input.object.description, + objectType: input.object.objectType, + searchText, + sourceMode: "import", + status: "active", + title: input.object.title, + updatedAt: new Date(), + }) + .where(eq(ConfigObjectTable.id, configObjectId)) + } else { + configObjectId = createDenTypeId("configObject") + await db.insert(ConfigObjectTable).values({ + connectorInstanceId: null, + createdByOrgMembershipId: input.createdByOrgMembershipId, + currentFileExtension, + currentFileName, + currentRelativePath: input.object.path, + deletedAt: null, + description: input.object.description, + id: configObjectId, + objectType: input.object.objectType, + organizationId: input.organizationId, + searchText, + sourceMode: "import", + status: "active", + title: input.object.title, + }) + } + + await db.insert(ConfigObjectVersionTable).values({ + configObjectId, + 
connectorSyncEventId: null, + createdByOrgMembershipId: input.createdByOrgMembershipId, + createdVia: "import", + id: createDenTypeId("configObjectVersion"), + isDeletedVersion: false, + normalizedPayloadJson: input.object.normalizedPayloadJson ?? null, + organizationId: input.organizationId, + rawSourceText, + schemaVersion: "claude-plugin/demo-seed-v1", + sourceRevisionRef: SOURCE_REVISION_REF, + }) + + await ensureConfigObjectAccessGrant({ + configObjectId, + createdByOrgMembershipId: input.createdByOrgMembershipId, + organizationId: input.organizationId, + }) + await ensurePluginConfigObject({ + configObjectId, + createdByOrgMembershipId: input.createdByOrgMembershipId, + organizationId: input.organizationId, + pluginId: input.pluginId, + }) +} + +async function ensureConfigObjectAccessGrant(input: { + configObjectId: ConfigObjectId + createdByOrgMembershipId: MemberId + organizationId: OrganizationId +}) { + const existing = await db + .select() + .from(ConfigObjectAccessGrantTable) + .where(and(eq(ConfigObjectAccessGrantTable.configObjectId, input.configObjectId), eq(ConfigObjectAccessGrantTable.orgWide, true))) + .limit(1) + if (existing[0]) { + await db + .update(ConfigObjectAccessGrantTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, orgWide: true, removedAt: null, role: "viewer" }) + .where(eq(ConfigObjectAccessGrantTable.id, existing[0].id)) + return existing[0].id + } + const id = createDenTypeId("configObjectAccessGrant") + await db.insert(ConfigObjectAccessGrantTable).values({ + configObjectId: input.configObjectId, + createdByOrgMembershipId: input.createdByOrgMembershipId, + id, + organizationId: input.organizationId, + orgMembershipId: null, + orgWide: true, + role: "viewer", + teamId: null, + }) + return id +} + +async function ensurePluginConfigObject(input: { + configObjectId: ConfigObjectId + createdByOrgMembershipId: MemberId + organizationId: OrganizationId + pluginId: PluginId +}) { + const existing = await db + 
.select() + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.pluginId, input.pluginId), eq(PluginConfigObjectTable.configObjectId, input.configObjectId))) + .limit(1) + if (existing[0]) { + await db + .update(PluginConfigObjectTable) + .set({ createdByOrgMembershipId: input.createdByOrgMembershipId, membershipSource: "system", removedAt: null }) + .where(eq(PluginConfigObjectTable.id, existing[0].id)) + return existing[0].id + } + const id = createDenTypeId("pluginConfigObject") + await db.insert(PluginConfigObjectTable).values({ + configObjectId: input.configObjectId, + connectorMappingId: null, + createdByOrgMembershipId: input.createdByOrgMembershipId, + id, + membershipSource: "system", + organizationId: input.organizationId, + pluginId: input.pluginId, + removedAt: null, + }) + return id +} + +function parseJsonObject(value: string): Record | undefined { + try { + const parsed = JSON.parse(value) as unknown + return typeof parsed === "object" && parsed !== null && !Array.isArray(parsed) ? parsed as Record : undefined + } catch { + return undefined + } +} + +async function fetchContents(path: string) { + const encoded = path.split("/").map((part) => encodeURIComponent(part)).join("/") + const response = await fetchJson(`${GITHUB_API_BASE}/${encoded}?ref=${encodeURIComponent(GITHUB_REF)}`) + return Array.isArray(response) ? response : [] +} + +async function fetchPluginContent(plugin: DemoPlugin): Promise { + const objects: PluginContentObject[] = [] + const manifestPath = `${plugin.slug}/.claude-plugin/plugin.json` + const manifestRaw = await fetchText(`${GITHUB_RAW_BASE}/${manifestPath}`) + objects.push({ + description: "Claude-compatible plugin manifest imported for the local Den demo.", + normalizedPayloadJson: manifestRaw ? parseJsonObject(manifestRaw) : { name: plugin.slug, description: plugin.description }, + objectType: "context", + path: manifestPath, + rawSourceText: manifestRaw ?? 
JSON.stringify({ name: plugin.slug, description: plugin.description, source: `${GITHUB_REPO}/${plugin.slug}` }, null, 2), + title: `${plugin.slug} manifest`, + }) + + const mcpPath = `${plugin.slug}/.mcp.json` + const mcpRaw = await fetchText(`${GITHUB_RAW_BASE}/${mcpPath}`) + if (mcpRaw) { + objects.push({ + description: "MCP connector manifest stored as plugin configuration only; demo seed does not create active connector accounts.", + normalizedPayloadJson: parseJsonObject(mcpRaw), + objectType: "mcp", + path: mcpPath, + rawSourceText: mcpRaw, + title: `${plugin.slug} MCP manifest`, + }) + } else { + objects.push({ + description: "Demo MCP placeholder showing where connector configuration would live without connecting any integration.", + normalizedPayloadJson: { mcpServers: {}, demoOnly: true, source: `${GITHUB_REPO}/${plugin.slug}` }, + objectType: "mcp", + path: `${plugin.slug}/.mcp.demo.json`, + rawSourceText: JSON.stringify({ mcpServers: {}, demoOnly: true, source: `${GITHUB_REPO}/${plugin.slug}` }, null, 2), + title: `${plugin.slug} MCP manifest`, + }) + } + + const skillEntries = (await fetchContents(`${plugin.slug}/skills`)) + .filter((entry) => entry.type === "dir" || entry.name.toLowerCase() === "skill.md") + .slice(0, 3) + + for (const entry of skillEntries) { + const skillPath = entry.type === "dir" ? `${entry.path}/SKILL.md` : entry.path + const raw = await fetchText(`${GITHUB_RAW_BASE}/${skillPath}`) + if (!raw) continue + objects.push({ + description: `Real skill source from ${GITHUB_REPO}/${skillPath}.`, + objectType: "skill", + path: skillPath, + rawSourceText: raw, + title: titleFromPath(entry.type === "dir" ? 
entry.path : skillPath), + }) + } + + const commandEntries = (await fetchContents(`${plugin.slug}/commands`)) + .filter((entry) => entry.type === "file" && /\.(md|mdx)$/i.test(entry.name)) + .slice(0, 2) + + for (const entry of commandEntries) { + const raw = await fetchText(`${GITHUB_RAW_BASE}/${entry.path}`) + if (!raw) continue + objects.push({ + description: `Real command source from ${GITHUB_REPO}/${entry.path}.`, + objectType: "command", + path: entry.path, + rawSourceText: raw, + title: titleFromPath(entry.path), + }) + } + + if (!objects.some((object) => object.objectType === "skill")) { + objects.push({ + description: "Demo fallback skill generated from the real marketplace description.", + objectType: "skill", + path: `${plugin.slug}/skills/demo-overview/SKILL.md`, + rawSourceText: `# ${plugin.slug} overview\n\n${plugin.description}\n\nSource: https://github.com/${GITHUB_REPO}/tree/${GITHUB_REF}/${plugin.slug}\n\nThis fallback is used when GitHub source fetching is unavailable during local seeding.`, + title: `${plugin.slug} overview`, + }) + } + + return objects +} + +function log(icon: string, message: string) { + console.log(` ${icon} ${message}`) +} + +async function resetDemoOrg() { + const existing = await db.select().from(OrganizationTable).where(eq(OrganizationTable.slug, DEMO_ORG_SLUG)).limit(1) + if (!existing[0]) { + log("⊘", "no existing demo org to reset") + return + } + const orgId = existing[0].id + log("↻", `resetting demo org ${orgId}…`) + + const pluginIds = (await db.select({ id: PluginTable.id }).from(PluginTable).where(eq(PluginTable.organizationId, orgId))).map((r) => r.id) + const marketplaceIds = (await db.select({ id: MarketplaceTable.id }).from(MarketplaceTable).where(eq(MarketplaceTable.organizationId, orgId))).map((r) => r.id) + const configObjectIds = (await db.select({ id: ConfigObjectTable.id }).from(ConfigObjectTable).where(eq(ConfigObjectTable.organizationId, orgId))).map((r) => r.id) + + if (configObjectIds.length > 0) { 
+ await db.delete(ConfigObjectVersionTable).where(inArray(ConfigObjectVersionTable.configObjectId, configObjectIds)) + await db.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.configObjectId, configObjectIds)) + await db.delete(ConfigObjectAccessGrantTable).where(inArray(ConfigObjectAccessGrantTable.configObjectId, configObjectIds)) + await db.delete(ConfigObjectTable).where(inArray(ConfigObjectTable.id, configObjectIds)) + } + if (pluginIds.length > 0) { + await db.delete(MarketplacePluginTable).where(inArray(MarketplacePluginTable.pluginId, pluginIds)) + await db.delete(PluginAccessGrantTable).where(inArray(PluginAccessGrantTable.pluginId, pluginIds)) + await db.delete(PluginTable).where(inArray(PluginTable.id, pluginIds)) + } + if (marketplaceIds.length > 0) { + await db.delete(MarketplaceAccessGrantTable).where(inArray(MarketplaceAccessGrantTable.marketplaceId, marketplaceIds)) + await db.delete(MarketplaceTable).where(inArray(MarketplaceTable.id, marketplaceIds)) + } + await db.delete(InvitationTable).where(eq(InvitationTable.organizationId, orgId)) + await db.delete(TeamMemberTable).where(inArray(TeamMemberTable.teamId, (await db.select({ id: TeamTable.id }).from(TeamTable).where(eq(TeamTable.organizationId, orgId))).map((r) => r.id))) + await db.delete(TeamTable).where(eq(TeamTable.organizationId, orgId)) + await db.delete(MemberTable).where(eq(MemberTable.organizationId, orgId)) + await db.delete(OrganizationTable).where(eq(OrganizationTable.id, orgId)) + log("✓", "demo org data deleted") +} + +async function seedPeopleAndTeams(organizationId: OrganizationId) { + const userIdsByEmail = new Map() + const memberIdsByEmail = new Map() + const teamIdsByName = new Map() + + for (const person of demoPeople) { + const userId = await ensureDisplayUser(person) + userIdsByEmail.set(person.email.toLowerCase(), userId) + const memberId = await ensureMember(organizationId, userId, person.role) + memberIdsByEmail.set(person.email.toLowerCase(), 
memberId) + } + + for (const teamName of [...new Set(demoPeople.flatMap((person) => person.teams).concat(pendingInvites.map((invite) => invite.team)))]) { + teamIdsByName.set(teamName, await ensureTeam(organizationId, teamName)) + } + + for (const person of demoPeople) { + const memberId = memberIdsByEmail.get(person.email.toLowerCase()) + if (!memberId) continue + for (const teamName of person.teams) { + const teamId = teamIdsByName.get(teamName) + if (teamId) await ensureTeamMember(teamId, memberId) + } + } + + const ownerUserId = userIdsByEmail.get(DEMO_OWNER_EMAIL.toLowerCase()) + if (!ownerUserId) throw new Error("Demo owner user missing after seed.") + for (const invite of pendingInvites) { + await ensureInvitation({ + email: invite.email, + inviterId: ownerUserId, + organizationId, + role: invite.role, + teamId: teamIdsByName.get(invite.team) ?? null, + }) + } + + return { memberIdsByEmail, teamIdsByName, userIdsByEmail } +} + +async function seedPlugins(input: { + createdByOrgMembershipId: MemberId + marketplaceId: MarketplaceId + organizationId: OrganizationId + teamIdsByName: Map +}) { + let seededPlugins = 0 + let seededObjects = 0 + for (const plugin of demoPlugins) { + const pluginId = await ensurePlugin({ ...input, plugin }) + if (plugin.orgWide) { + await ensurePluginAccessGrant({ ...input, orgWide: true, pluginId, role: "viewer" }) + } + for (const teamName of plugin.teamAccess) { + const teamId = input.teamIdsByName.get(teamName) + if (teamId) { + await ensurePluginAccessGrant({ ...input, pluginId, role: "editor", teamId }) + } + } + + const contentObjects = await fetchPluginContent(plugin) + for (const object of contentObjects) { + await ensureConfigObject({ + createdByOrgMembershipId: input.createdByOrgMembershipId, + object, + organizationId: input.organizationId, + pluginId, + }) + seededObjects++ + } + seededPlugins++ + log("✓", `plugin ${seededPlugins}/${demoPlugins.length}: ${plugin.slug} (${contentObjects.length} objects)`) + } + return { 
seededObjects, seededPlugins } +} + +async function main() { + assertSafeDevTarget() + const startMs = Date.now() + + console.log() + console.log(` den demo seed · ${DEMO_ORG_NAME}`) + console.log(` ${"─".repeat(40)}`) + log("◈", `org slug: ${DEMO_ORG_SLUG}`) + log("◈", `database: ${env.databaseUrl?.replace(/:[^@]*@/, ":***@") ?? "unknown"}`) + log("◈", `github fetch: ${SHOULD_FETCH_GITHUB ? "enabled" : "disabled"}`) + if (RESET_MODE) log("◈", "reset mode: will delete existing demo org first") + console.log() + + if (RESET_MODE) { + await resetDemoOrg() + console.log() + } + + log("…", "creating owner account") + const ownerUserId = await ensureSignedInOwnerUser() + log("✓", `owner: ${DEMO_OWNER_EMAIL}`) + + log("…", "creating organization") + const organizationId = await ensureOrganization(ownerUserId) + log("✓", `org: ${organizationId}`) + console.log() + + log("…", `seeding ${demoPeople.length} users and teams`) + const { memberIdsByEmail, teamIdsByName } = await seedPeopleAndTeams(organizationId) + log("✓", `${memberIdsByEmail.size} members · ${teamIdsByName.size} teams · ${pendingInvites.length} pending invites`) + console.log() + + const ownerMembershipId = memberIdsByEmail.get(DEMO_OWNER_EMAIL.toLowerCase()) + if (!ownerMembershipId) throw new Error("Demo owner membership missing after seed.") + + log("…", "creating marketplace") + const marketplaceId = await ensureMarketplace({ createdByOrgMembershipId: ownerMembershipId, organizationId }) + log("✓", `marketplace: ${marketplaceId}`) + console.log() + + log("…", `seeding ${demoPlugins.length} plugins`) + const { seededObjects, seededPlugins } = await seedPlugins({ createdByOrgMembershipId: ownerMembershipId, marketplaceId, organizationId, teamIdsByName }) + console.log() + + const elapsedSeconds = ((Date.now() - startMs) / 1000).toFixed(1) + console.log(` ${"─".repeat(40)}`) + log("✓", `done in ${elapsedSeconds}s`) + log(" ", `${memberIdsByEmail.size} members · ${teamIdsByName.size} teams · ${seededPlugins} 
plugins · ${seededObjects} config objects`) + console.log() + log("→", `login: ${DEMO_OWNER_EMAIL} / ${DEMO_OWNER_PASSWORD}`) + log("→", `open: /organization or /o/${DEMO_ORG_SLUG}/dashboard`) + console.log() +} + +main() + .then(() => process.exit(0)) + .catch((error) => { + console.error(error instanceof Error ? error.message : String(error)) + process.exit(1) + }) diff --git a/ee/apps/den-api/scripts/smoke-email-failures.mjs b/ee/apps/den-api/scripts/smoke-email-failures.mjs new file mode 100644 index 0000000000..7d845606c4 --- /dev/null +++ b/ee/apps/den-api/scripts/smoke-email-failures.mjs @@ -0,0 +1,56 @@ +#!/usr/bin/env node +/** + * Standalone smoke test for the invitation-email failure paths. + * + * Run inside the den-api container (or any environment where the package has + * been built to `dist/`): + * + * docker exec -e OPENWORK_DEV_MODE=0 \ + * openwork-den-dev--den-1 \ + * node ee/apps/den-api/scripts/smoke-email-failures.mjs + * + * Expected output: + * [smoke] ok loops_not_configured { reason: 'loops_not_configured', ... } + * + * Add `-e LOOPS_API_KEY=bogus -e LOOPS_TRANSACTIONAL_ID_DEN_ORG_INVITE_EMAIL=bogus` + * to also reach the `loops_rejected` path (Loops returns 401). + * + * Intentionally side-effect free: no DB writes, no auth. + */ + +const { sendDenOrganizationInvitationEmail, DenEmailSendError } = await import( + "../dist/email.js" +) + +const recipient = process.argv[2] ?? 
"smoke-test@example.com" + +try { + await sendDenOrganizationInvitationEmail({ + email: recipient, + inviteLink: "https://example.com/join?invite=smoke", + invitedByName: "Smoke Test", + invitedByEmail: "smoke@example.com", + organizationName: "Smoke Org", + role: "member", + }) + + if (process.env.OPENWORK_DEV_MODE === "1" || !process.env.OPENWORK_DEV_MODE) { + console.log("[smoke] ok dev_mode_noop (no email sent, no throw — expected)") + process.exit(0) + } + + console.error("[smoke] FAIL: expected throw when Loops is not configured or rejects") + process.exit(1) +} catch (error) { + if (!(error instanceof DenEmailSendError)) { + console.error("[smoke] FAIL: wrong error class:", error) + process.exit(1) + } + + console.log(`[smoke] ok ${error.reason}`, { + reason: error.reason, + template: error.template, + recipient: error.recipient, + detail: error.detail, + }) +} diff --git a/ee/apps/den-api/src/CONSTS.ts b/ee/apps/den-api/src/CONSTS.ts new file mode 100644 index 0000000000..63beb864ae --- /dev/null +++ b/ee/apps/den-api/src/CONSTS.ts @@ -0,0 +1 @@ +export const DEN_WORKER_POLL_INTERVAL_MS = 1000 diff --git a/ee/apps/den-api/src/active-organization.ts b/ee/apps/den-api/src/active-organization.ts new file mode 100644 index 0000000000..9221df4574 --- /dev/null +++ b/ee/apps/den-api/src/active-organization.ts @@ -0,0 +1,19 @@ +import { asc, eq } from "@openwork-ee/den-db/drizzle" +import { MemberTable } from "@openwork-ee/den-db/schema" +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import { db } from "./db.js" + +export async function getInitialActiveOrganizationIdForUser(userId: string) { + const normalizedUserId = normalizeDenTypeId("user", userId) + + const rows = await db + .select({ + organizationId: MemberTable.organizationId, + }) + .from(MemberTable) + .where(eq(MemberTable.userId, normalizedUserId)) + .orderBy(asc(MemberTable.createdAt)) + .limit(1) + + return rows[0]?.organizationId ?? 
null +} diff --git a/ee/apps/den-api/src/admin-allowlist.ts b/ee/apps/den-api/src/admin-allowlist.ts new file mode 100644 index 0000000000..a5e593891a --- /dev/null +++ b/ee/apps/den-api/src/admin-allowlist.ts @@ -0,0 +1,53 @@ +import { sql } from "@openwork-ee/den-db/drizzle" +import { AdminAllowlistTable } from "@openwork-ee/den-db/schema" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import { db } from "./db.js" + +const ADMIN_ALLOWLIST_SEEDS = [ + { + email: "ben@openworklabs.com", + note: "Seeded internal admin", + }, + { + email: "jan@openworklabs.com", + note: "Seeded internal admin", + }, + { + email: "omar@openworklabs.com", + note: "Seeded internal admin", + }, + { + email: "berk@openworklabs.com", + note: "Seeded internal admin", + }, +] as const + +let ensureAdminAllowlistSeededPromise: Promise | null = null + +async function seedAdminAllowlist() { + for (const entry of ADMIN_ALLOWLIST_SEEDS) { + await db + .insert(AdminAllowlistTable) + .values({ + id: createDenTypeId("adminAllowlist"), + ...entry, + }) + .onDuplicateKeyUpdate({ + set: { + note: entry.note, + updated_at: sql`CURRENT_TIMESTAMP(3)`, + }, + }) + } +} + +export async function ensureAdminAllowlistSeeded() { + if (!ensureAdminAllowlistSeededPromise) { + ensureAdminAllowlistSeededPromise = seedAdminAllowlist().catch((error) => { + ensureAdminAllowlistSeededPromise = null + throw error + }) + } + + await ensureAdminAllowlistSeededPromise +} diff --git a/ee/apps/den-api/src/api-keys.ts b/ee/apps/den-api/src/api-keys.ts new file mode 100644 index 0000000000..037c9cf0cb --- /dev/null +++ b/ee/apps/den-api/src/api-keys.ts @@ -0,0 +1,222 @@ +import { and, asc, desc, eq, inArray } from "@openwork-ee/den-db/drizzle" +import { AuthApiKeyTable, AuthUserTable, MemberTable } from "@openwork-ee/den-db/schema" +import type { DenTypeId } from "@openwork-ee/utils/typeid" +import { db } from "./db.js" + +export const DEN_API_KEY_HEADER = "x-api-key" +export const DEN_API_KEY_DEFAULT_PREFIX = 
"den_" +export const DEN_API_KEY_RATE_LIMIT_MAX = 600 +export const DEN_API_KEY_RATE_LIMIT_TIME_WINDOW_MS = 60_000 + +type UserId = typeof AuthUserTable.$inferSelect.id +type OrganizationId = typeof MemberTable.$inferSelect.organizationId +type OrganizationMemberId = typeof MemberTable.$inferSelect.id +type ApiKeyId = typeof AuthApiKeyTable.$inferSelect.id + +export type DenApiKeyMetadata = { + organizationId: OrganizationId + orgMembershipId: OrganizationMemberId + issuedByUserId: UserId + issuedByOrgMembershipId: OrganizationMemberId +} + +export type DenApiKeySession = { + id: ApiKeyId + configId: string + referenceId: string + metadata: DenApiKeyMetadata | null +} + +export type OrganizationApiKeySummary = { + id: ApiKeyId + configId: string + name: string | null + start: string | null + prefix: string | null + enabled: boolean + rateLimitEnabled: boolean + rateLimitMax: number | null + rateLimitTimeWindow: number | null + lastRequest: Date | null + expiresAt: Date | null + createdAt: Date + updatedAt: Date + owner: { + userId: UserId + memberId: OrganizationMemberId + name: string + email: string + image: string | null + } +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +function parseApiKeyMetadata(value: unknown): DenApiKeyMetadata | null { + const parsed = typeof value === "string" + ? (() => { + try { + return JSON.parse(value) as unknown + } catch { + return null + } + })() + : value + + if (!isRecord(parsed)) { + return null + } + + const organizationId = typeof parsed.organizationId === "string" ? parsed.organizationId : null + const orgMembershipId = typeof parsed.orgMembershipId === "string" ? parsed.orgMembershipId : null + const issuedByUserId = typeof parsed.issuedByUserId === "string" ? parsed.issuedByUserId : null + const issuedByOrgMembershipId = typeof parsed.issuedByOrgMembershipId === "string" ? 
parsed.issuedByOrgMembershipId : null + + if (!organizationId || !orgMembershipId || !issuedByUserId || !issuedByOrgMembershipId) { + return null + } + + return { + organizationId: organizationId as OrganizationId, + orgMembershipId: orgMembershipId as OrganizationMemberId, + issuedByUserId: issuedByUserId as UserId, + issuedByOrgMembershipId: issuedByOrgMembershipId as OrganizationMemberId, + } +} + +export function buildOrganizationApiKeyMetadata(input: { + organizationId: OrganizationId + orgMembershipId: OrganizationMemberId + issuedByUserId: UserId + issuedByOrgMembershipId: OrganizationMemberId +}): DenApiKeyMetadata { + return { + organizationId: input.organizationId, + orgMembershipId: input.orgMembershipId, + issuedByUserId: input.issuedByUserId, + issuedByOrgMembershipId: input.issuedByOrgMembershipId, + } +} + +export async function getApiKeySessionById(apiKeyId: string): Promise { + const rows = await db + .select({ + id: AuthApiKeyTable.id, + configId: AuthApiKeyTable.configId, + referenceId: AuthApiKeyTable.referenceId, + metadata: AuthApiKeyTable.metadata, + }) + .from(AuthApiKeyTable) + .where(eq(AuthApiKeyTable.id, apiKeyId)) + .limit(1) + + const row = rows[0] + if (!row) { + return null + } + + return { + id: row.id, + configId: row.configId, + referenceId: row.referenceId, + metadata: parseApiKeyMetadata(row.metadata), + } +} + +export async function listOrganizationApiKeys(organizationId: OrganizationId): Promise { + const members = await db + .select({ + memberId: MemberTable.id, + userId: MemberTable.userId, + userName: AuthUserTable.name, + userEmail: AuthUserTable.email, + userImage: AuthUserTable.image, + }) + .from(MemberTable) + .innerJoin(AuthUserTable, eq(MemberTable.userId, AuthUserTable.id)) + .where(eq(MemberTable.organizationId, organizationId)) + .orderBy(asc(MemberTable.createdAt)) + + if (members.length === 0) { + return [] + } + + const memberByUserId = new Map(members.map((member) => [member.userId, member])) + + const apiKeys 
= await db + .select() + .from(AuthApiKeyTable) + .where(inArray(AuthApiKeyTable.referenceId, members.map((member) => member.userId))) + .orderBy(desc(AuthApiKeyTable.createdAt)) + + return apiKeys + .map((apiKey) => { + const owner = memberByUserId.get(apiKey.referenceId as UserId) + const metadata = parseApiKeyMetadata(apiKey.metadata) + + if (!owner || !metadata || metadata.organizationId !== organizationId || metadata.orgMembershipId !== owner.memberId) { + return null + } + + return { + id: apiKey.id, + configId: apiKey.configId, + name: apiKey.name, + start: apiKey.start, + prefix: apiKey.prefix, + enabled: apiKey.enabled, + rateLimitEnabled: apiKey.rateLimitEnabled, + rateLimitMax: apiKey.rateLimitMax, + rateLimitTimeWindow: apiKey.rateLimitTimeWindow, + lastRequest: apiKey.lastRequest, + expiresAt: apiKey.expiresAt, + createdAt: apiKey.createdAt, + updatedAt: apiKey.updatedAt, + owner: { + userId: owner.userId, + memberId: owner.memberId, + name: owner.userName, + email: owner.userEmail, + image: owner.userImage, + }, + } satisfies OrganizationApiKeySummary + }) + .filter((apiKey): apiKey is OrganizationApiKeySummary => apiKey !== null) +} + +export async function getOrganizationApiKeyById(input: { + organizationId: OrganizationId + apiKeyId: ApiKeyId +}) { + const keys = await listOrganizationApiKeys(input.organizationId) + return keys.find((apiKey) => apiKey.id === input.apiKeyId) ?? 
null +} + +export async function deleteOrganizationApiKey(input: { + organizationId: OrganizationId + apiKeyId: ApiKeyId +}) { + const apiKey = await getOrganizationApiKeyById(input) + if (!apiKey) { + return null + } + + await db + .delete(AuthApiKeyTable) + .where(and(eq(AuthApiKeyTable.id, input.apiKeyId), eq(AuthApiKeyTable.referenceId, apiKey.owner.userId))) + + return apiKey +} + +export function isScopedApiKeyForOrganization(input: { + apiKey: DenApiKeySession | null + organizationId: string +}) { + return input.apiKey?.metadata?.organizationId === input.organizationId +} + +export function getApiKeyScopedOrganizationId(apiKey: DenApiKeySession | null): DenTypeId<"organization"> | null { + return apiKey?.metadata?.organizationId ?? null +} diff --git a/ee/apps/den-api/src/app.ts b/ee/apps/den-api/src/app.ts new file mode 100644 index 0000000000..cb13ac0ff2 --- /dev/null +++ b/ee/apps/den-api/src/app.ts @@ -0,0 +1,215 @@ +import "./load-env.js" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import { swaggerUI } from "@hono/swagger-ui" +import { cors } from "hono/cors" +import { Hono } from "hono" +import { logger } from "hono/logger" +import type { RequestIdVariables } from "hono/request-id" +import { requestId } from "hono/request-id" +import { describeRoute, openAPIRouteHandler, resolver } from "hono-openapi" +import { z } from "zod" +import { env } from "./env.js" +import type { MemberTeamsContext, OrganizationContextVariables, UserOrganizationsContext } from "./middleware/index.js" +import { buildOperationId, emptyResponse, htmlResponse, jsonResponse } from "./openapi.js" +import { registerAdminRoutes } from "./routes/admin/index.js" +import { registerAuthRoutes } from "./routes/auth/index.js" +import { registerMeRoutes } from "./routes/me/index.js" +import { registerOrgRoutes } from "./routes/org/index.js" +import { registerVersionRoutes } from "./routes/version/index.js" +import { registerWebhookRoutes } from "./routes/webhooks/index.js" 
+import { registerWorkerRoutes } from "./routes/workers/index.js" +import { registerMcpRoutes } from "./mcp/index.js" +import type { AuthContextVariables } from "./session.js" +import { sessionMiddleware } from "./session.js" + +type AppVariables = RequestIdVariables & AuthContextVariables & Partial & Partial & Partial + +const healthResponseSchema = z.object({ + ok: z.literal(true), + service: z.literal("den-api"), +}).meta({ ref: "DenApiHealthResponse" }) + +const openApiDocumentSchema = z.object({ + openapi: z.string(), + info: z.object({ + title: z.string(), + version: z.string(), + }).passthrough(), + paths: z.record(z.string(), z.unknown()), + components: z.object({}).passthrough().optional(), +}).passthrough().meta({ ref: "OpenApiDocument" }) + +const app = new Hono<{ Variables: AppVariables }>() + +app.use("*", logger()) +app.use("*", requestId({ + headerName: "", + generator: () => createDenTypeId("request"), +})) +app.use("*", async (c, next) => { + await next() + c.header("X-Request-Id", c.get("requestId")) +}) + +if (env.corsOrigins.length > 0) { + app.use( + "*", + cors({ + origin: env.corsOrigins, + credentials: true, + allowHeaders: ["Content-Type", "Authorization", "X-Api-Key", "X-Request-Id"], + allowMethods: ["GET", "POST", "PATCH", "DELETE", "OPTIONS"], + exposeHeaders: ["Content-Length", "X-Request-Id"], + maxAge: 600, + }), + ) +} + +app.use("*", sessionMiddleware) + +app.get( + "/", + describeRoute({ + tags: ["System"], + hide: true, + summary: "Redirect API root", + description: "Redirects the API root to the OpenWork marketing site instead of serving API content.", + responses: { + 302: emptyResponse("Redirect to the OpenWork marketing site."), + }, + }), + (c) => { + return c.redirect("https://openworklabs.com", 302) + }, +) + +app.get( + "/health", + describeRoute({ + tags: ["System"], + summary: "Check den-api health", + description: "Returns a lightweight health payload for den-api.", + responses: { + 200: { + description: "den-api is 
reachable", + content: { + "application/json": { + schema: resolver(healthResponseSchema), + }, + }, + }, + }, + }), + (c) => { + return c.json({ ok: true, service: "den-api" }) + }, +) + +registerAdminRoutes(app) +registerAuthRoutes(app) +registerMeRoutes(app) +registerOrgRoutes(app) +registerVersionRoutes(app) +registerWebhookRoutes(app) +registerWorkerRoutes(app) +registerMcpRoutes(app) + +app.get( + "/openapi.json", + describeRoute({ + tags: ["System"], + summary: "Get OpenAPI document", + description: "Returns the machine-readable OpenAPI 3.1 document for the Den API so humans and tools can inspect the API surface.", + responses: { + 200: jsonResponse("OpenAPI document returned successfully.", openApiDocumentSchema), + }, + }), + openAPIRouteHandler(app, { + documentation: { + openapi: "3.1.0", + info: { + title: "Den API", + version: "dev", + description: [ + "OpenAPI spec for the Den control plane API.", + "", + "Authentication:", + "- Use `Authorization: Bearer ` for user-authenticated routes that require a Den session.", + "- Use `x-api-key: ` for API-key-authenticated routes that accept organization API keys.", + "- Public routes like health and documentation do not require authentication.", + "", + "Swagger tip: use the security schemes in the Authorize dialog to set either `bearerAuth` or `denApiKey` before trying protected endpoints.", + ].join("\n"), + }, + servers: [ + { url: "https://api.openworklabs.com" }, + ], + tags: [ + { name: "System", description: "Service health and operational routes." }, + { name: "Organizations", description: "Top-level organization creation and context routes." }, + { name: "Invitations", description: "Invitation preview, acceptance, creation, and cancellation routes." }, + { name: "API Keys", description: "Organization API key management routes." }, + { name: "Members", description: "Organization member management routes." }, + { name: "Roles", description: "Organization custom role management routes." 
}, + { name: "Teams", description: "Organization team management routes." }, + { name: "Templates", description: "Organization shared template routes." }, + { name: "LLM Providers", description: "Organization LLM provider catalog, configuration, and access routes." }, + { name: "Skills", description: "Organization skill authoring and sharing routes." }, + { name: "Skill Hubs", description: "Organization skill hub management and access routes." }, + { name: "Workers", description: "Worker lifecycle, billing, and runtime routes." }, + { name: "Worker Runtime", description: "Worker runtime inspection and upgrade routes." }, + { name: "Worker Activity", description: "Worker heartbeat and activity reporting routes." }, + { name: "Admin", description: "Administrative reporting routes." }, + { name: "Users", description: "Current user and membership routes." }, + ], + components: { + securitySchemes: { + bearerAuth: { + type: "http", + scheme: "bearer", + bearerFormat: "session-token", + description: "Session token passed as `Authorization: Bearer ` for user-authenticated Den routes.", + }, + denApiKey: { + type: "apiKey", + in: "header", + name: "x-api-key", + description: "Organization API key passed as the `x-api-key` header for API-key-authenticated Den routes.", + }, + }, + }, + }, + includeEmptyPaths: true, + exclude: ["/docs", "/openapi.json"], + excludeMethods: ["OPTIONS"], + defaultOptions: { + ALL: { + operationId: (route) => buildOperationId(route.method, route.path), + }, + }, + }), +) + +app.get( + "/docs", + describeRoute({ + tags: ["System"], + summary: "Serve Swagger UI", + description: "Serves Swagger UI so developers can browse and try the Den API from a browser.", + responses: { + 200: htmlResponse("Swagger UI page returned successfully."), + }, + }), + swaggerUI({ + url: "/openapi.json", + persistAuthorization: true, + displayOperationId: true, + defaultModelsExpandDepth: 1, + }), +) + +app.notFound((c) => { + return c.json({ error: "not_found" }, 404) 
+}) + +export default app diff --git a/ee/apps/den-api/src/auth.ts b/ee/apps/den-api/src/auth.ts new file mode 100644 index 0000000000..923818950f --- /dev/null +++ b/ee/apps/den-api/src/auth.ts @@ -0,0 +1,365 @@ +import { getInitialActiveOrganizationIdForUser } from "./active-organization.js"; +import { db } from "./db.js"; +import { env } from "./env.js"; +import { + sendDenOrganizationInvitationEmail, + sendDenVerificationEmail, +} from "./email.js"; +import { syncDenSignupContact } from "./loops.js"; +import { + DEN_API_KEY_DEFAULT_PREFIX, + DEN_API_KEY_RATE_LIMIT_MAX, + DEN_API_KEY_RATE_LIMIT_TIME_WINDOW_MS, +} from "./api-keys.js"; +import { + denOrganizationAccess, + denOrganizationStaticRoles, +} from "./organization-access.js"; +import { seedDefaultOrganizationRoles } from "./orgs.js"; +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid"; +import * as schema from "@openwork-ee/den-db/schema"; +import { apiKey } from "@better-auth/api-key"; +import { oauthProvider } from "@better-auth/oauth-provider"; +import { APIError } from "better-call"; +import { betterAuth } from "better-auth"; +import { drizzleAdapter } from "better-auth/adapters/drizzle"; +import { emailOTP, jwt, organization } from "better-auth/plugins"; + +function localMcpResourceAliases(resource: string) { + if (!env.devMode) { + return []; + } + + try { + const url = new URL(resource); + if (url.hostname === "127.0.0.1") { + url.hostname = "localhost"; + return [url.toString().replace(/\/+$/, "")]; + } + if (url.hostname === "localhost") { + url.hostname = "127.0.0.1"; + return [url.toString().replace(/\/+$/, "")]; + } + } catch {} + + return []; +} + +export const DEN_MCP_RESOURCE = env.mcpResourceUrl ?? 
`${env.betterAuthUrl}/mcp`; +export const DEN_MCP_RESOURCES = Array.from(new Set([ + DEN_MCP_RESOURCE, + ...localMcpResourceAliases(DEN_MCP_RESOURCE), +])); +export const DEN_MCP_SCOPES = ["openid", "profile", "email", "offline_access", "mcp:read", "mcp:write"]; +export const DEN_MCP_TOKEN_USE_CLAIM = "https://openworklabs.com/token_use"; +export const DEN_MCP_ORG_ID_CLAIM = "https://openworklabs.com/org_id"; +export const DEN_MCP_RESOURCE_CLAIM = "https://openworklabs.com/resource"; +export const DEN_MCP_OPAQUE_ACCESS_TOKEN_PREFIX = "ow_mcp_at_"; + +const socialProviders = { + ...(env.github.clientId && env.github.clientSecret + ? { + github: { + clientId: env.github.clientId, + clientSecret: env.github.clientSecret, + }, + } + : {}), + ...(env.google.clientId && env.google.clientSecret + ? { + google: { + clientId: env.google.clientId, + clientSecret: env.google.clientSecret, + }, + } + : {}), +}; + +function hasRole(roleValue: string, roleName: string) { + return roleValue + .split(",") + .map((entry) => entry.trim()) + .filter(Boolean) + .includes(roleName); +} + +function getInvitationOrigin() { + return ( + env.betterAuthTrustedOrigins.find((origin) => origin !== "*") ?? + env.betterAuthUrl + ); +} + +function buildInvitationLink(invitationId: string) { + return new URL( + `/join-org?invite=${encodeURIComponent(invitationId)}`, + getInvitationOrigin(), + ).toString(); +} + +function hasMcpScope(scopes: readonly string[]) { + return scopes.some((scope) => scope.startsWith("mcp:")); +} + +export const auth = betterAuth({ + baseURL: env.betterAuthUrl, + secret: env.betterAuthSecret, + trustedOrigins: + env.betterAuthTrustedOrigins.length > 0 + ? env.betterAuthTrustedOrigins + : undefined, + socialProviders: + Object.keys(socialProviders).length > 0 ? 
socialProviders : undefined, + database: drizzleAdapter(db, { + provider: "mysql", + schema, + }), + databaseHooks: { + session: { + create: { + before: async (session) => { + const activeOrganizationId = await getInitialActiveOrganizationIdForUser(session.userId); + + return { + data: { + ...session, + activeOrganizationId, + }, + }; + }, + }, + }, + }, + advanced: { + ipAddress: { + ipAddressHeaders: ["x-forwarded-for", "x-real-ip", "cf-connecting-ip"], + ipv6Subnet: 64, + }, + database: { + generateId: (options) => { + switch (options.model) { + case "user": + return createDenTypeId("user"); + case "session": + return createDenTypeId("session"); + case "account": + return createDenTypeId("account"); + case "verification": + return createDenTypeId("verification"); + case "apikey": + case "apiKey": + return createDenTypeId("apiKey"); + case "oauthClient": + return createDenTypeId("oauthClient"); + case "oauthAccessToken": + return createDenTypeId("oauthAccessToken"); + case "oauthRefreshToken": + return createDenTypeId("oauthRefreshToken"); + case "oauthConsent": + return createDenTypeId("oauthConsent"); + case "rateLimit": + return createDenTypeId("rateLimit"); + case "organization": + return createDenTypeId("organization"); + case "member": + return createDenTypeId("member"); + case "invitation": + return createDenTypeId("invitation"); + case "team": + return createDenTypeId("team"); + case "teamMember": + return createDenTypeId("teamMember"); + case "organizationRole": + return createDenTypeId("organizationRole"); + default: + return false; + } + }, + }, + }, + rateLimit: { + enabled: true, + storage: "database", + window: 60, + max: 20, + customRules: { + "/sign-in/email": { + window: 300, + max: 5, + }, + "/sign-up/email": { + window: 3600, + max: 3, + }, + "/email-otp/send-verification-otp": { + window: 3600, + max: 5, + }, + "/email-otp/verify-email": { + window: 300, + max: 10, + }, + "/request-password-reset": { + window: 3600, + max: 5, + }, + }, + }, + 
emailVerification: { + sendOnSignUp: true, + sendOnSignIn: true, + afterEmailVerification: async (user) => { + await syncDenSignupContact({ + email: user.email, + name: user.name, + }); + }, + }, + emailAndPassword: { + enabled: true, + autoSignIn: false, + requireEmailVerification: true, + }, + plugins: [ + jwt(), + emailOTP({ + overrideDefaultEmailVerification: true, + otpLength: 6, + expiresIn: 600, + allowedAttempts: 5, + async sendVerificationOTP({ email, otp, type }) { + await sendDenVerificationEmail({ + email, + verificationCode: otp, + }); + }, + }), + organization({ + ac: denOrganizationAccess, + roles: denOrganizationStaticRoles, + creatorRole: "owner", + requireEmailVerificationOnInvitation: true, + dynamicAccessControl: { + enabled: true, + }, + teams: { + enabled: true, + defaultTeam: { + enabled: false, + }, + }, + async sendInvitationEmail(data) { + await sendDenOrganizationInvitationEmail({ + email: data.email, + inviteLink: buildInvitationLink(data.id), + invitedByName: data.inviter.user.name ?? 
data.inviter.user.email, + invitedByEmail: data.inviter.user.email, + organizationName: data.organization.name, + role: data.role, + }); + }, + organizationHooks: { + afterCreateOrganization: async ({ organization }) => { + await seedDefaultOrganizationRoles( + normalizeDenTypeId("organization", organization.id), + ); + }, + beforeRemoveMember: async ({ member }) => { + if (hasRole(member.role, "owner")) { + throw new APIError("BAD_REQUEST", { + message: "The organization owner cannot be removed.", + }); + } + }, + beforeUpdateMemberRole: async ({ member, newRole }) => { + if (hasRole(member.role, "owner")) { + throw new APIError("BAD_REQUEST", { + message: "The organization owner role cannot be changed.", + }); + } + + if (hasRole(newRole, "owner")) { + throw new APIError("BAD_REQUEST", { + message: + "Owner can only be assigned during organization creation.", + }); + } + }, + }, + }), + oauthProvider({ + loginPage: env.betterAuthUrl, + consentPage: `${env.betterAuthUrl}/mcp/select-organization`, + scopes: [...DEN_MCP_SCOPES], + validAudiences: DEN_MCP_RESOURCES, + allowPublicClientPrelogin: true, + allowDynamicClientRegistration: true, + allowUnauthenticatedClientRegistration: true, + clientRegistrationDefaultScopes: ["openid", "profile", "email", "mcp:read", "mcp:write"], + clientRegistrationAllowedScopes: [...DEN_MCP_SCOPES], + advertisedMetadata: { + scopes_supported: [...DEN_MCP_SCOPES], + claims_supported: [ + DEN_MCP_TOKEN_USE_CLAIM, + DEN_MCP_ORG_ID_CLAIM, + DEN_MCP_RESOURCE_CLAIM, + ], + }, + postLogin: { + page: `${env.betterAuthUrl}/mcp/select-organization`, + shouldRedirect: async ({ session, scopes }) => { + if (!hasMcpScope(scopes)) { + return false; + } + + return !session.activeOrganizationId; + }, + consentReferenceId: async ({ session, scopes }) => { + if (!hasMcpScope(scopes)) { + return undefined; + } + + const activeOrganizationId = typeof session.activeOrganizationId === "string" + ? 
session.activeOrganizationId + : undefined; + if (!activeOrganizationId) { + throw new APIError("BAD_REQUEST", { + message: "Select an organization before authorizing MCP access.", + }); + } + + return normalizeDenTypeId("organization", activeOrganizationId); + }, + }, + customAccessTokenClaims: ({ referenceId, resource, scopes }) => { + const claims: Record = {}; + if (hasMcpScope(scopes) || resource === DEN_MCP_RESOURCE) { + claims[DEN_MCP_TOKEN_USE_CLAIM] = "mcp"; + claims[DEN_MCP_RESOURCE_CLAIM] = resource ?? DEN_MCP_RESOURCE; + } + if (referenceId) { + claims[DEN_MCP_ORG_ID_CLAIM] = referenceId; + } + return claims; + }, + prefix: { + opaqueAccessToken: DEN_MCP_OPAQUE_ACCESS_TOKEN_PREFIX, + refreshToken: "ow_mcp_rt_", + clientSecret: "ow_mcp_cs_", + }, + }), + apiKey({ + defaultPrefix: DEN_API_KEY_DEFAULT_PREFIX, + enableMetadata: true, + enableSessionForAPIKeys: true, + maximumNameLength: 64, + requireName: true, + storage: "database", + rateLimit: { + enabled: true, + maxRequests: DEN_API_KEY_RATE_LIMIT_MAX, + timeWindow: DEN_API_KEY_RATE_LIMIT_TIME_WINDOW_MS, + }, + }), + ], +}); diff --git a/ee/apps/den-api/src/billing/polar.ts b/ee/apps/den-api/src/billing/polar.ts new file mode 100644 index 0000000000..d202a6f3cd --- /dev/null +++ b/ee/apps/den-api/src/billing/polar.ts @@ -0,0 +1,819 @@ +import { env } from "../env.js" +import { sendSubscribedToDenEvent } from "../loops.js" + +type PolarCustomerState = { + granted_benefits?: Array<{ + benefit_id?: string + }> +} + +type PolarCheckoutSession = { + url?: string +} + +type PolarCustomerSession = { + customer_portal_url?: string +} + +type PolarCustomer = { + id?: string + email?: string + external_id?: string | null +} + +type PolarListResource = { + items?: T[] +} + +type PolarSubscription = { + id?: string + status?: string + amount?: number + currency?: string + recurring_interval?: string | null + recurring_interval_count?: number | null + current_period_start?: string | null + current_period_end?: 
string | null + cancel_at_period_end?: boolean + canceled_at?: string | null + ended_at?: string | null +} + +type PolarOrder = { + id?: string + created_at?: string + status?: string + total_amount?: number + net_amount?: number + currency?: string + invoice_number?: string + is_invoice_generated?: boolean +} + +type PolarOrderInvoice = { + url?: string +} + +type PolarProductPrice = { + amount_type?: string + price_currency?: string + price_amount?: number + minimum_amount?: number + preset_amount?: number | null + is_archived?: boolean + seat_tiers?: { + tiers?: Array<{ + price_per_seat?: number + }> + } +} + +type PolarProduct = { + recurring_interval?: string | null + recurring_interval_count?: number | null + prices?: PolarProductPrice[] +} + +export type CloudWorkerAccess = + | { + allowed: true + } + | { + allowed: false + checkoutUrl: string + } + +export type CloudWorkerBillingPrice = { + amount: number | null + currency: string | null + recurringInterval: string | null + recurringIntervalCount: number | null +} + +export type CloudWorkerBillingSubscription = { + id: string + status: string + amount: number | null + currency: string | null + recurringInterval: string | null + recurringIntervalCount: number | null + currentPeriodStart: string | null + currentPeriodEnd: string | null + cancelAtPeriodEnd: boolean + canceledAt: string | null + endedAt: string | null +} + +export type CloudWorkerBillingInvoice = { + id: string + createdAt: string | null + status: string + totalAmount: number | null + currency: string | null + invoiceNumber: string | null + invoiceUrl: string | null +} + +export type CloudWorkerBillingStatus = { + featureGateEnabled: boolean + hasActivePlan: boolean + checkoutRequired: boolean + checkoutUrl: string | null + portalUrl: string | null + price: CloudWorkerBillingPrice | null + subscription: CloudWorkerBillingSubscription | null + invoices: CloudWorkerBillingInvoice[] +} + +export type CloudWorkerAdminBillingStatus = { + status: 
"paid" | "unpaid" | "unavailable" + featureGateEnabled: boolean + subscriptionId: string | null + subscriptionStatus: string | null + currentPeriodEnd: string | null + source: "benefit" | "subscription" | "unavailable" + note: string | null +} + +type CloudAccessInput = { + userId: string + email: string + name: string +} + +type BillingStatusOptions = { + includeCheckoutUrl?: boolean + includePortalUrl?: boolean + includeInvoices?: boolean +} + +function sanitizeApiBase(value: string) { + return value.replace(/\/+$/, "") +} + +function parseJson(text: string): T | null { + if (!text) { + return null + } + + return JSON.parse(text) as T +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +async function polarFetch(path: string, init: RequestInit = {}) { + const headers = new Headers(init.headers) + headers.set("Authorization", `Bearer ${env.polar.accessToken}`) + headers.set("Accept", "application/json") + if (init.body && !headers.has("Content-Type")) { + headers.set("Content-Type", "application/json") + } + + return fetch(`${sanitizeApiBase(env.polar.apiBase)}${path}`, { + ...init, + headers, + }) +} + +async function polarFetchJson(path: string, init: RequestInit = {}) { + const response = await polarFetch(path, init) + const text = await response.text() + const payload = parseJson(text) + return { response, text, payload } +} + +function assertPaywallConfig() { + if (!env.polar.accessToken) { + throw new Error("POLAR_ACCESS_TOKEN is required when POLAR_FEATURE_GATE_ENABLED=true") + } + if (!env.polar.productId) { + throw new Error("POLAR_PRODUCT_ID is required when POLAR_FEATURE_GATE_ENABLED=true") + } + if (!env.polar.successUrl) { + throw new Error("POLAR_SUCCESS_URL is required when POLAR_FEATURE_GATE_ENABLED=true") + } + if (!env.polar.returnUrl) { + throw new Error("POLAR_RETURN_URL is required when POLAR_FEATURE_GATE_ENABLED=true") + } +} + +async function 
getCustomerStateByExternalId(externalCustomerId: string): Promise { + const encodedExternalId = encodeURIComponent(externalCustomerId) + const { response, payload, text } = await polarFetchJson(`/v1/customers/external/${encodedExternalId}/state`, { + method: "GET", + }) + + if (response.status === 404) { + return null + } + + if (!response.ok) { + throw new Error(`Polar customer state lookup failed (${response.status}): ${text.slice(0, 400)}`) + } + + return payload +} + +async function getCustomerStateById(customerId: string): Promise { + const encodedCustomerId = encodeURIComponent(customerId) + const { response, payload, text } = await polarFetchJson(`/v1/customers/${encodedCustomerId}/state`, { + method: "GET", + }) + + if (response.status === 404) { + return null + } + + if (!response.ok) { + throw new Error(`Polar customer state lookup by ID failed (${response.status}): ${text.slice(0, 400)}`) + } + + return payload +} + +async function getCustomerByEmail(email: string): Promise { + const normalizedEmail = email.trim().toLowerCase() + if (!normalizedEmail) { + return null + } + + const encodedEmail = encodeURIComponent(normalizedEmail) + const { response, payload, text } = await polarFetchJson>(`/v1/customers/?email=${encodedEmail}`, { + method: "GET", + }) + + if (!response.ok) { + throw new Error(`Polar customer lookup by email failed (${response.status}): ${text.slice(0, 400)}`) + } + + const customers = payload?.items ?? [] + const exact = customers.find((customer) => customer.email?.trim().toLowerCase() === normalizedEmail) + return exact ?? customers[0] ?? 
null +} + +async function linkCustomerExternalId(customer: PolarCustomer, externalCustomerId: string): Promise { + if (!customer.id) { + return + } + + if (typeof customer.external_id === "string" && customer.external_id.length > 0) { + return + } + + const encodedCustomerId = encodeURIComponent(customer.id) + await polarFetch(`/v1/customers/${encodedCustomerId}`, { + method: "PATCH", + body: JSON.stringify({ + external_id: externalCustomerId, + }), + }) +} + +function hasRequiredBenefit(state: PolarCustomerState | null) { + if (!state?.granted_benefits || !env.polar.benefitId) { + return false + } + + return state.granted_benefits.some((grant) => grant.benefit_id === env.polar.benefitId) +} + +async function createCheckoutSession(input: CloudAccessInput): Promise { + const payload = { + products: [env.polar.productId], + success_url: env.polar.successUrl, + return_url: env.polar.returnUrl, + external_customer_id: input.userId, + customer_email: input.email, + customer_name: input.name, + } + + const { response, payload: checkout, text } = await polarFetchJson("/v1/checkouts/", { + method: "POST", + body: JSON.stringify(payload), + }) + + if (!response.ok) { + throw new Error(`Polar checkout creation failed (${response.status}): ${text.slice(0, 400)}`) + } + + if (!checkout?.url) { + throw new Error("Polar checkout response missing URL") + } + + return checkout.url +} + +type CloudWorkerAccessEvaluation = { + featureGateEnabled: boolean + hasActivePlan: boolean + checkoutUrl: string | null +} + +async function evaluateCloudWorkerAccess( + input: CloudAccessInput, + options: { includeCheckoutUrl?: boolean } = {}, +): Promise { + if (!env.polar.featureGateEnabled) { + return { + featureGateEnabled: false, + hasActivePlan: true, + checkoutUrl: null, + } + } + + assertPaywallConfig() + + const externalState = await getCustomerStateByExternalId(input.userId) + if (hasRequiredBenefit(externalState)) { + return { + featureGateEnabled: true, + hasActivePlan: true, + 
checkoutUrl: null, + } + } + + const customer = await getCustomerByEmail(input.email) + if (customer?.id) { + const emailState = await getCustomerStateById(customer.id) + if (hasRequiredBenefit(emailState)) { + await linkCustomerExternalId(customer, input.userId).catch(() => undefined) + return { + featureGateEnabled: true, + hasActivePlan: true, + checkoutUrl: null, + } + } + } + + return { + featureGateEnabled: true, + hasActivePlan: false, + checkoutUrl: options.includeCheckoutUrl ? await createCheckoutSession(input) : null, + } +} + +function normalizeRecurringInterval(value: string | null | undefined): string | null { + return typeof value === "string" && value.trim().length > 0 ? value : null +} + +function normalizeRecurringIntervalCount(value: number | null | undefined): number | null { + return typeof value === "number" && Number.isFinite(value) ? value : null +} + +function isActiveSubscriptionStatus(status: string | null | undefined) { + const normalized = typeof status === "string" ? status.trim().toLowerCase() : "" + return normalized === "active" || normalized === "trialing" +} + +function toBillingSubscription(subscription: PolarSubscription | null): CloudWorkerBillingSubscription | null { + if (!subscription?.id) { + return null + } + + return { + id: subscription.id, + status: typeof subscription.status === "string" ? subscription.status : "unknown", + amount: typeof subscription.amount === "number" ? subscription.amount : null, + currency: typeof subscription.currency === "string" ? subscription.currency : null, + recurringInterval: normalizeRecurringInterval(subscription.recurring_interval), + recurringIntervalCount: normalizeRecurringIntervalCount(subscription.recurring_interval_count), + currentPeriodStart: typeof subscription.current_period_start === "string" ? subscription.current_period_start : null, + currentPeriodEnd: typeof subscription.current_period_end === "string" ? 
subscription.current_period_end : null, + cancelAtPeriodEnd: subscription.cancel_at_period_end === true, + canceledAt: typeof subscription.canceled_at === "string" ? subscription.canceled_at : null, + endedAt: typeof subscription.ended_at === "string" ? subscription.ended_at : null, + } +} + +function toBillingPriceFromSubscription(subscription: CloudWorkerBillingSubscription | null): CloudWorkerBillingPrice | null { + if (!subscription) { + return null + } + + return { + amount: subscription.amount, + currency: subscription.currency, + recurringInterval: subscription.recurringInterval, + recurringIntervalCount: subscription.recurringIntervalCount, + } +} + +async function getSubscriptionById(subscriptionId: string): Promise { + const encodedId = encodeURIComponent(subscriptionId) + const { response, payload, text } = await polarFetchJson(`/v1/subscriptions/${encodedId}`, { + method: "GET", + }) + + if (response.status === 404) { + return null + } + + if (!response.ok) { + throw new Error(`Polar subscription lookup failed (${response.status}): ${text.slice(0, 400)}`) + } + + return payload +} + +async function listSubscriptionsByExternalCustomer( + externalCustomerId: string, + options: { activeOnly?: boolean; limit?: number } = {}, +): Promise { + const params = new URLSearchParams() + params.set("external_customer_id", externalCustomerId) + if (env.polar.productId) { + params.set("product_id", env.polar.productId) + } + params.set("limit", String(options.limit ?? 
1)) + params.set("sorting", "-started_at") + + if (options.activeOnly === true) { + params.set("active", "true") + } + + const lookup = await polarFetchJson>(`/v1/subscriptions/?${params.toString()}`, { + method: "GET", + }) + let response = lookup.response + let payload = lookup.payload + let text = lookup.text + + if (response.status === 422 && params.has("sorting")) { + params.delete("sorting") + const fallbackLookup = await polarFetchJson>(`/v1/subscriptions/?${params.toString()}`, { + method: "GET", + }) + response = fallbackLookup.response + payload = fallbackLookup.payload + text = fallbackLookup.text + } + + if (!response.ok) { + throw new Error(`Polar subscriptions lookup failed (${response.status}): ${text.slice(0, 400)}`) + } + + return payload?.items ?? [] +} + +async function getPrimarySubscriptionForCustomer(externalCustomerId: string): Promise { + const active = await listSubscriptionsByExternalCustomer(externalCustomerId, { activeOnly: true, limit: 1 }) + if (active[0]) { + return active[0] + } + + const recent = await listSubscriptionsByExternalCustomer(externalCustomerId, { activeOnly: false, limit: 1 }) + return recent[0] ?? null +} + +async function listRecentOrdersByExternalCustomer(externalCustomerId: string, limit = 6): Promise { + const params = new URLSearchParams() + params.set("external_customer_id", externalCustomerId) + if (env.polar.productId) { + params.set("product_id", env.polar.productId) + } + params.set("limit", String(limit)) + params.set("sorting", "-created_at") + + const { response, payload, text } = await polarFetchJson>(`/v1/orders/?${params.toString()}`, { + method: "GET", + }) + + if (!response.ok) { + throw new Error(`Polar orders lookup failed (${response.status}): ${text.slice(0, 400)}`) + } + + return payload?.items ?? 
[]
+}
+
+// Fetches the hosted invoice URL for an order. Returns null when the invoice
+// does not exist (404); throws on other failures.
+async function getOrderInvoiceUrl(orderId: string): Promise {
+  const encodedId = encodeURIComponent(orderId)
+  const { response, payload, text } = await polarFetchJson(`/v1/orders/${encodedId}/invoice`, {
+    method: "GET",
+  })
+
+  if (response.status === 404) {
+    return null
+  }
+
+  if (!response.ok) {
+    throw new Error(`Polar invoice lookup failed (${response.status}): ${text.slice(0, 400)}`)
+  }
+
+  return typeof payload?.url === "string" ? payload.url : null
+}
+
+// Maps a raw Polar order onto the worker-facing invoice shape.
+// Returns null for orders without an id; prefers total_amount and falls back
+// to net_amount when the total is absent.
+function toBillingInvoice(order: PolarOrder, invoiceUrl: string | null): CloudWorkerBillingInvoice | null {
+  if (!order.id) {
+    return null
+  }
+
+  const totalAmount =
+    typeof order.total_amount === "number"
+      ? order.total_amount
+      : typeof order.net_amount === "number"
+        ? order.net_amount
+        : null
+
+  return {
+    id: order.id,
+    createdAt: typeof order.created_at === "string" ? order.created_at : null,
+    status: typeof order.status === "string" ? order.status : "unknown",
+    totalAmount,
+    currency: typeof order.currency === "string" ? order.currency : null,
+    invoiceNumber: typeof order.invoice_number === "string" ? order.invoice_number : null,
+    invoiceUrl,
+  }
+}
+
+// Builds the invoice list for the billing-status payload. Invoice URLs are
+// fetched only for orders that report a generated invoice, and URL lookup
+// failures degrade to a null link instead of failing the whole listing.
+async function listBillingInvoices(externalCustomerId: string, limit = 6): Promise {
+  const orders = await listRecentOrdersByExternalCustomer(externalCustomerId, limit)
+  const invoices = await Promise.all(
+    orders.map(async (order) => {
+      const invoiceUrl = order.id && order.is_invoice_generated === true ? await getOrderInvoiceUrl(order.id).catch(() => null) : null
+      return toBillingInvoice(order, invoiceUrl)
+    }),
+  )
+
+  return invoices.filter((invoice): invoice is CloudWorkerBillingInvoice => invoice !== null)
+}
+
+// Creates a Polar customer-portal session and returns its URL.
+async function createCustomerPortalUrl(externalCustomerId: string): Promise {
+  const body = {
+    external_customer_id: externalCustomerId,
+    return_url: env.polar.returnUrl ?? env.polar.successUrl ??
null,
+  }
+
+  const { response, payload, text } = await polarFetchJson("/v1/customer-sessions/", {
+    method: "POST",
+    body: JSON.stringify(body),
+  })
+
+  // 404/422 mean the customer cannot be resolved; treat as "no portal".
+  if (response.status === 404 || response.status === 422) {
+    return null
+  }
+
+  if (!response.ok) {
+    throw new Error(`Polar customer portal session failed (${response.status}): ${text.slice(0, 400)}`)
+  }
+
+  return typeof payload?.customer_portal_url === "string" ? payload.customer_portal_url : null
+}
+
+// Extracts a representative amount (minor units) from a product price:
+// - fixed: the configured amount
+// - seat_based: the first tier's per-seat price
+//   NOTE(review): assumes the first tier is the representative one — confirm
+//   against how seat tiers are ordered in the Polar catalog.
+// - custom: preset amount, falling back to the minimum
+// - free: zero
+// Returns null when no amount can be determined.
+function extractAmountFromProductPrice(price: PolarProductPrice): number | null {
+  if (price.amount_type === "fixed" && typeof price.price_amount === "number") {
+    return price.price_amount
+  }
+
+  if (price.amount_type === "seat_based") {
+    const firstTier = Array.isArray(price.seat_tiers?.tiers) ? price.seat_tiers?.tiers[0] : null
+    if (firstTier && typeof firstTier.price_per_seat === "number") {
+      return firstTier.price_per_seat
+    }
+  }
+
+  if (price.amount_type === "custom") {
+    if (typeof price.preset_amount === "number") {
+      return price.preset_amount
+    }
+    if (typeof price.minimum_amount === "number") {
+      return price.minimum_amount
+    }
+  }
+
+  if (price.amount_type === "free") {
+    return 0
+  }
+
+  return null
+}
+
+// Picks the first non-archived price with a resolvable amount from the
+// product's price list and normalizes it into the billing-price shape.
+function extractBillingPriceFromProduct(product: PolarProduct | null): CloudWorkerBillingPrice | null {
+  if (!product || !Array.isArray(product.prices)) {
+    return null
+  }
+
+  for (const price of product.prices) {
+    if (!isRecord(price) || price.is_archived === true) {
+      continue
+    }
+
+    const amount = extractAmountFromProductPrice(price as PolarProductPrice)
+    if (amount === null) {
+      continue
+    }
+
+    const currency = typeof price.price_currency === "string" ?
price.price_currency : null + return { + amount, + currency, + recurringInterval: normalizeRecurringInterval(product.recurring_interval), + recurringIntervalCount: normalizeRecurringIntervalCount(product.recurring_interval_count), + } + } + + return null +} + +async function getProductBillingPrice(productId: string): Promise { + const encodedId = encodeURIComponent(productId) + const { response, payload, text } = await polarFetchJson(`/v1/products/${encodedId}`, { + method: "GET", + }) + + if (response.status === 404) { + return null + } + + if (!response.ok) { + throw new Error(`Polar product lookup failed (${response.status}): ${text.slice(0, 400)}`) + } + + return extractBillingPriceFromProduct(payload) +} + +export async function requireCloudWorkerAccess(input: CloudAccessInput): Promise { + const evaluation = await evaluateCloudWorkerAccess(input, { includeCheckoutUrl: true }) + if (evaluation.hasActivePlan) { + return { allowed: true } + } + + if (!evaluation.checkoutUrl) { + throw new Error("Polar checkout URL unavailable") + } + + return { + allowed: false, + checkoutUrl: evaluation.checkoutUrl, + } +} + +export async function getCloudWorkerBillingStatus( + input: CloudAccessInput, + options: BillingStatusOptions = {}, +): Promise { + const includePortalUrl = options.includePortalUrl !== false + const includeInvoices = options.includeInvoices !== false + const evaluation = await evaluateCloudWorkerAccess(input, { + includeCheckoutUrl: options.includeCheckoutUrl, + }) + + if (!evaluation.featureGateEnabled) { + return { + featureGateEnabled: false, + hasActivePlan: true, + checkoutRequired: false, + checkoutUrl: null, + portalUrl: null, + price: null, + subscription: null, + invoices: [], + } + } + + if (evaluation.hasActivePlan) { + await sendSubscribedToDenEvent(input) + } + + const [subscriptionResult, priceResult, portalResult, invoicesResult] = await Promise.all([ + getPrimarySubscriptionForCustomer(input.userId).catch(() => null), + env.polar.productId 
? getProductBillingPrice(env.polar.productId).catch(() => null) : Promise.resolve(null), + includePortalUrl ? createCustomerPortalUrl(input.userId).catch(() => null) : Promise.resolve(null), + includeInvoices ? listBillingInvoices(input.userId).catch(() => []) : Promise.resolve([]), + ]) + + const subscription = toBillingSubscription(subscriptionResult) + const productPrice = priceResult + const portalUrl = portalResult + const invoices = invoicesResult + + return { + featureGateEnabled: evaluation.featureGateEnabled, + hasActivePlan: evaluation.hasActivePlan, + checkoutRequired: evaluation.featureGateEnabled && !evaluation.hasActivePlan, + checkoutUrl: evaluation.checkoutUrl, + portalUrl, + price: productPrice ?? toBillingPriceFromSubscription(subscription), + subscription, + invoices, + } +} + +export async function getCloudWorkerAdminBillingStatus( + input: CloudAccessInput, +): Promise { + if (!env.polar.accessToken) { + return { + status: "unavailable", + featureGateEnabled: env.polar.featureGateEnabled, + subscriptionId: null, + subscriptionStatus: null, + currentPeriodEnd: null, + source: "unavailable", + note: "Polar access token is not configured.", + } + } + + if (!env.polar.benefitId && !env.polar.productId) { + return { + status: "unavailable", + featureGateEnabled: env.polar.featureGateEnabled, + subscriptionId: null, + subscriptionStatus: null, + currentPeriodEnd: null, + source: "unavailable", + note: "Polar product or benefit configuration is missing.", + } + } + + try { + let note: string | null = null + let paidByBenefit = false + + if (env.polar.benefitId) { + const externalState = await getCustomerStateByExternalId(input.userId) + if (hasRequiredBenefit(externalState)) { + paidByBenefit = true + note = "Benefit granted via external customer id." 
+ } else { + const customer = await getCustomerByEmail(input.email) + if (customer?.id) { + const emailState = await getCustomerStateById(customer.id) + if (hasRequiredBenefit(emailState)) { + paidByBenefit = true + note = "Benefit granted via matching customer email." + await linkCustomerExternalId(customer, input.userId).catch(() => undefined) + } + } + } + } + + const subscription = env.polar.productId ? await getPrimarySubscriptionForCustomer(input.userId) : null + const normalizedSubscription = toBillingSubscription(subscription) + const paidBySubscription = isActiveSubscriptionStatus(normalizedSubscription?.status) + + return { + status: paidByBenefit || paidBySubscription ? "paid" : "unpaid", + featureGateEnabled: env.polar.featureGateEnabled, + subscriptionId: normalizedSubscription?.id ?? null, + subscriptionStatus: normalizedSubscription?.status ?? null, + currentPeriodEnd: normalizedSubscription?.currentPeriodEnd ?? null, + source: paidByBenefit ? "benefit" : "subscription", + note: + note ?? + (normalizedSubscription + ? "Subscription status resolved from Polar." + : "No active billing record was found for this user."), + } + } catch (error) { + return { + status: "unavailable", + featureGateEnabled: env.polar.featureGateEnabled, + subscriptionId: null, + subscriptionStatus: null, + currentPeriodEnd: null, + source: "unavailable", + note: error instanceof Error ? 
error.message : "Billing lookup failed.",
+    }
+  }
+}
+
+// Toggles cancel-at-period-end on the user's active subscription.
+// Returns null when the feature gate is off or no active subscription exists;
+// otherwise PATCHes Polar and returns the normalized subscription,
+// re-fetching it when the PATCH response does not echo the record back.
+export async function setCloudWorkerSubscriptionCancellation(
+  input: CloudAccessInput,
+  cancelAtPeriodEnd: boolean,
+): Promise {
+  if (!env.polar.featureGateEnabled) {
+    return null
+  }
+
+  assertPaywallConfig()
+
+  const activeSubscriptions = await listSubscriptionsByExternalCustomer(input.userId, {
+    activeOnly: true,
+    limit: 1,
+  })
+  const active = activeSubscriptions[0]
+  if (!active?.id) {
+    return null
+  }
+
+  const encodedId = encodeURIComponent(active.id)
+  const { response, payload, text } = await polarFetchJson(`/v1/subscriptions/${encodedId}`, {
+    method: "PATCH",
+    body: JSON.stringify({
+      cancel_at_period_end: cancelAtPeriodEnd,
+    }),
+  })
+
+  if (!response.ok) {
+    throw new Error(`Polar subscription update failed (${response.status}): ${text.slice(0, 400)}`)
+  }
+
+  if (payload?.id) {
+    return toBillingSubscription(payload)
+  }
+
+  const refreshed = await getSubscriptionById(active.id)
+  return toBillingSubscription(refreshed)
+}
diff --git a/ee/apps/den-api/src/db.ts b/ee/apps/den-api/src/db.ts
new file mode 100644
index 0000000000..bf48dfb132
--- /dev/null
+++ b/ee/apps/den-api/src/db.ts
@@ -0,0 +1,8 @@
+import { createDenDb } from "@openwork-ee/den-db"
+import { env } from "./env.js"
+
+// Shared database handle for the den-api service, configured from env
+// (mysql vs planetscale mode).
+export const { db } = createDenDb({
+  databaseUrl: env.databaseUrl,
+  mode: env.dbMode,
+  planetscale: env.planetscale,
+})
diff --git a/ee/apps/den-api/src/email.ts b/ee/apps/den-api/src/email.ts
new file mode 100644
index 0000000000..5ce42ddfe1
--- /dev/null
+++ b/ee/apps/den-api/src/email.ts
@@ -0,0 +1,169 @@
+import { env } from "./env.js"
+
+const LOOPS_TRANSACTIONAL_API_URL = "https://app.loops.so/api/v1/transactional"
+
+/**
+ * Error thrown when a transactional email send fails or is skipped because
+ * of misconfiguration. Handlers can inspect `.reason` to decide how to
+ * surface the failure to the caller (e.g. map to an HTTP status).
+ */
+export class DenEmailSendError extends Error {
+  // Why the send failed; lets handlers map the failure to an HTTP status.
+  readonly reason:
+    | "loops_not_configured"
+    | "loops_rejected"
+    | "loops_network"
+  // Which transactional template was being sent.
+  readonly template: "verification" | "organization_invite"
+  readonly recipient: string
+  // Optional upstream detail (Loops message or network error text).
+  readonly detail?: string
+
+  constructor(input: {
+    template: DenEmailSendError["template"]
+    reason: DenEmailSendError["reason"]
+    recipient: string
+    detail?: string
+  }) {
+    super(
+      `[${input.template}] email for ${input.recipient} failed: ${input.reason}${
+        input.detail ? ` (${input.detail})` : ""
+      }`,
+    )
+    this.name = "DenEmailSendError"
+    this.reason = input.reason
+    this.template = input.template
+    this.recipient = input.recipient
+    this.detail = input.detail
+  }
+}
+
+// Posts a single transactional-email request to Loops.
+// Wraps network failures and non-OK responses in DenEmailSendError so
+// callers can branch on `.reason`.
+async function postLoopsTransactional(input: {
+  transactionalId: string
+  email: string
+  dataVariables: Record
+  template: DenEmailSendError["template"]
+}): Promise {
+  let response: Response
+  try {
+    response = await fetch(LOOPS_TRANSACTIONAL_API_URL, {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${env.loops.apiKey}`,
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        transactionalId: input.transactionalId,
+        email: input.email,
+        dataVariables: input.dataVariables,
+      }),
+    })
+  } catch (error) {
+    const message = error instanceof Error ? error.message : "Unknown error"
+    throw new DenEmailSendError({
+      template: input.template,
+      reason: "loops_network",
+      recipient: input.email,
+      detail: message,
+    })
+  }
+
+  if (response.ok) {
+    return
+  }
+
+  // Prefer the Loops-provided message over a bare status code when present.
+  let detail = `status ${response.status}`
+  try {
+    const payload = (await response.json()) as { message?: string }
+    if (payload.message?.trim()) {
+      detail = payload.message
+    }
+  } catch {
+    // Ignore invalid upstream payloads.
+  }
+
+  throw new DenEmailSendError({
+    template: input.template,
+    reason: "loops_rejected",
+    recipient: input.email,
+    detail,
+  })
+}
+
+// Sends the email-verification code through Loops.
+// Silently no-ops on blank email/code; in dev mode logs the payload instead
+// of sending. Throws DenEmailSendError when Loops is unconfigured or the
+// send fails.
+export async function sendDenVerificationEmail(input: {
+  email: string
+  verificationCode: string
+}) {
+  const email = input.email.trim()
+  const verificationCode = input.verificationCode.trim()
+
+  if (!email || !verificationCode) {
+    return
+  }
+
+  if (env.devMode) {
+    console.info(`[auth] dev verification email payload for ${email}: ${JSON.stringify({ verificationCode })}`)
+    return
+  }
+
+  if (!env.loops.apiKey || !env.loops.transactionalIdDenVerifyEmail) {
+    throw new DenEmailSendError({
+      template: "verification",
+      reason: "loops_not_configured",
+      recipient: email,
+    })
+  }
+
+  await postLoopsTransactional({
+    transactionalId: env.loops.transactionalIdDenVerifyEmail,
+    email,
+    dataVariables: { verificationCode },
+    template: "verification",
+  })
+}
+
+// Sends an organization invitation email through Loops.
+// Silently no-ops on a blank recipient; in dev mode logs the payload instead
+// of sending; throws DenEmailSendError when Loops is unconfigured or rejects.
+export async function sendDenOrganizationInvitationEmail(input: {
+  email: string
+  inviteLink: string
+  invitedByName: string
+  invitedByEmail: string
+  organizationName: string
+  role: string
+}) {
+  const email = input.email.trim()
+
+  if (!email) {
+    return
+  }
+
+  if (env.devMode) {
+    console.info(
+      `[auth] dev organization invite email payload for ${email}: ${JSON.stringify({
+        inviteLink: input.inviteLink,
+        invitedByName: input.invitedByName,
+        invitedByEmail: input.invitedByEmail,
+        organizationName: input.organizationName,
+        role: input.role,
+      })}`,
+    )
+    return
+  }
+
+  if (!env.loops.apiKey || !env.loops.transactionalIdDenOrgInviteEmail) {
+    throw new DenEmailSendError({
+      template: "organization_invite",
+      reason: "loops_not_configured",
+      recipient: email,
+    })
+  }
+
+  await postLoopsTransactional({
+    transactionalId: env.loops.transactionalIdDenOrgInviteEmail,
+    email,
+    dataVariables: {
+      inviteLink: input.inviteLink,
+      invitedByName: input.invitedByName,
+      invitedByEmail: input.invitedByEmail,
+      organizationName: input.organizationName,
+      role:
input.role, + }, + template: "organization_invite", + }) +} diff --git a/ee/apps/den-api/src/env.ts b/ee/apps/den-api/src/env.ts new file mode 100644 index 0000000000..681ad9c94a --- /dev/null +++ b/ee/apps/den-api/src/env.ts @@ -0,0 +1,289 @@ +import { DEN_WORKER_POLL_INTERVAL_MS } from "./CONSTS.js" +import { z } from "zod" + +const EnvSchema = z.object({ + DATABASE_URL: z.string().min(1).optional(), + DATABASE_HOST: z.string().min(1).optional(), + DATABASE_USERNAME: z.string().min(1).optional(), + DATABASE_PASSWORD: z.string().optional(), + DEN_DB_ENCRYPTION_KEY: z.string().trim().min(32), + DB_MODE: z.enum(["mysql", "planetscale"]).optional(), + BETTER_AUTH_SECRET: z.string().min(32), + BETTER_AUTH_URL: z.string().min(1), + DEN_MCP_RESOURCE_URL: z.string().optional(), + DEN_BETTER_AUTH_TRUSTED_ORIGINS: z.string().optional(), + GITHUB_CLIENT_ID: z.string().optional(), + GITHUB_CLIENT_SECRET: z.string().optional(), + GITHUB_CONNECTOR_APP_ID: z.string().optional(), + GITHUB_CONNECTOR_APP_CLIENT_ID: z.string().optional(), + GITHUB_CONNECTOR_APP_CLIENT_SECRET: z.string().optional(), + GITHUB_CONNECTOR_APP_PRIVATE_KEY: z.string().optional(), + GITHUB_CONNECTOR_APP_WEBHOOK_SECRET: z.string().optional(), + GOOGLE_CLIENT_ID: z.string().optional(), + GOOGLE_CLIENT_SECRET: z.string().optional(), + LOOPS_API_KEY: z.string().optional(), + LOOPS_TRANSACTIONAL_ID_DEN_VERIFY_EMAIL: z.string().optional(), + LOOPS_TRANSACTIONAL_ID_DEN_ORG_INVITE_EMAIL: z.string().optional(), + OPENWORK_DEV_MODE: z.string().optional(), + PORT: z.string().optional(), + CORS_ORIGINS: z.string().optional(), + WORKER_PROXY_PORT: z.string().optional(), + PROVISIONER_MODE: z.enum(["stub", "render", "daytona"]).optional(), + WORKER_URL_TEMPLATE: z.string().optional(), + WORKER_ACTIVITY_BASE_URL: z.string().optional(), + OPENWORK_DAYTONA_ENV_PATH: z.string().optional(), + RENDER_API_BASE: z.string().optional(), + RENDER_API_KEY: z.string().optional(), + RENDER_OWNER_ID: z.string().optional(), + 
RENDER_WORKER_REPO: z.string().optional(), + RENDER_WORKER_BRANCH: z.string().optional(), + RENDER_WORKER_ROOT_DIR: z.string().optional(), + RENDER_WORKER_PLAN: z.string().optional(), + RENDER_WORKER_REGION: z.string().optional(), + RENDER_WORKER_OPENWORK_VERSION: z.string().optional(), + RENDER_WORKER_NAME_PREFIX: z.string().optional(), + RENDER_WORKER_PUBLIC_DOMAIN_SUFFIX: z.string().optional(), + RENDER_CUSTOM_DOMAIN_READY_TIMEOUT_MS: z.string().optional(), + RENDER_PROVISION_TIMEOUT_MS: z.string().optional(), + RENDER_HEALTHCHECK_TIMEOUT_MS: z.string().optional(), + RENDER_POLL_INTERVAL_MS: z.string().optional(), + VERCEL_API_BASE: z.string().optional(), + VERCEL_TOKEN: z.string().optional(), + VERCEL_TEAM_ID: z.string().optional(), + VERCEL_TEAM_SLUG: z.string().optional(), + VERCEL_DNS_DOMAIN: z.string().optional(), + POLAR_FEATURE_GATE_ENABLED: z.string().optional(), + POLAR_API_BASE: z.string().optional(), + POLAR_ACCESS_TOKEN: z.string().optional(), + POLAR_PRODUCT_ID: z.string().optional(), + POLAR_BENEFIT_ID: z.string().optional(), + POLAR_SUCCESS_URL: z.string().optional(), + POLAR_RETURN_URL: z.string().optional(), + DAYTONA_API_URL: z.string().optional(), + DAYTONA_API_KEY: z.string().optional(), + DAYTONA_TARGET: z.string().optional(), + DAYTONA_SNAPSHOT: z.string().optional(), + DAYTONA_SANDBOX_IMAGE: z.string().optional(), + DAYTONA_SANDBOX_CPU: z.string().optional(), + DAYTONA_SANDBOX_MEMORY: z.string().optional(), + DAYTONA_SANDBOX_DISK: z.string().optional(), + DAYTONA_SANDBOX_PUBLIC: z.string().optional(), + DAYTONA_SANDBOX_AUTO_STOP_INTERVAL: z.string().optional(), + DAYTONA_SANDBOX_AUTO_ARCHIVE_INTERVAL: z.string().optional(), + DAYTONA_SANDBOX_AUTO_DELETE_INTERVAL: z.string().optional(), + DAYTONA_SIGNED_PREVIEW_EXPIRES_SECONDS: z.string().optional(), + DAYTONA_WORKER_PROXY_BASE_URL: z.string().optional(), + DAYTONA_SANDBOX_NAME_PREFIX: z.string().optional(), + DAYTONA_SHARED_VOLUME_NAME: z.string().optional(), + DAYTONA_VOLUME_NAME_PREFIX: 
z.string().optional(), + DAYTONA_WORKSPACE_MOUNT_PATH: z.string().optional(), + DAYTONA_DATA_MOUNT_PATH: z.string().optional(), + DAYTONA_RUNTIME_WORKSPACE_PATH: z.string().optional(), + DAYTONA_RUNTIME_DATA_PATH: z.string().optional(), + DAYTONA_SIDECAR_DIR: z.string().optional(), + DAYTONA_OPENWORK_PORT: z.string().optional(), + DAYTONA_OPENCODE_PORT: z.string().optional(), + DAYTONA_CREATE_TIMEOUT_SECONDS: z.string().optional(), + DAYTONA_DELETE_TIMEOUT_SECONDS: z.string().optional(), + DAYTONA_HEALTHCHECK_TIMEOUT_MS: z.string().optional(), +}).superRefine((value, ctx) => { + const inferredMode = value.DB_MODE ?? (value.DATABASE_URL ? "mysql" : "planetscale") + + if (inferredMode === "mysql" && !value.DATABASE_URL) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "DATABASE_URL is required when using mysql mode", + path: ["DATABASE_URL"], + }) + } + + if (inferredMode === "planetscale") { + for (const key of ["DATABASE_HOST", "DATABASE_USERNAME", "DATABASE_PASSWORD"] as const) { + if (!value[key]) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `${key} is required when using planetscale mode`, + path: [key], + }) + } + } + } +}) + +const parsed = EnvSchema.parse(process.env) + +function splitCsv(value: string | undefined) { + return (value ?? "") + .split(",") + .map((entry) => entry.trim()) + .filter(Boolean) +} + +function optionalString(value: string | undefined) { + const trimmed = value?.trim() + return trimmed ? trimmed : undefined +} + +function normalizeOrigin(origin: string) { + const value = origin.trim() + if (value === "*") { + return value + } + return value.replace(/\/+$/, "") +} + +const corsOrigins = splitCsv(parsed.CORS_ORIGINS).map((origin) => normalizeOrigin(origin)) +const betterAuthTrustedOrigins = splitCsv(parsed.DEN_BETTER_AUTH_TRUSTED_ORIGINS) + .map((origin) => normalizeOrigin(origin)) + +const polarFeatureGateEnabled = + (parsed.POLAR_FEATURE_GATE_ENABLED ?? 
"false").toLowerCase() === "true" + +const devMode = (parsed.OPENWORK_DEV_MODE ?? "0").trim() === "1" +const port = Number(parsed.PORT ?? "8790") + +const daytonaSandboxPublic = + (parsed.DAYTONA_SANDBOX_PUBLIC ?? "false").toLowerCase() === "true" + +const planetscaleCredentials = + parsed.DATABASE_HOST && parsed.DATABASE_USERNAME && parsed.DATABASE_PASSWORD !== undefined + ? { + host: parsed.DATABASE_HOST, + username: parsed.DATABASE_USERNAME, + password: parsed.DATABASE_PASSWORD, + } + : null + +export const env = { + databaseUrl: parsed.DATABASE_URL, + dbEncryptionKey: optionalString(parsed.DEN_DB_ENCRYPTION_KEY), + dbMode: parsed.DB_MODE ?? (parsed.DATABASE_URL ? "mysql" : "planetscale"), + planetscale: planetscaleCredentials, + betterAuthSecret: parsed.BETTER_AUTH_SECRET, + betterAuthUrl: normalizeOrigin(parsed.BETTER_AUTH_URL), + mcpResourceUrl: optionalString(parsed.DEN_MCP_RESOURCE_URL) + ? normalizeOrigin(parsed.DEN_MCP_RESOURCE_URL!) + : devMode + ? `http://127.0.0.1:${port}/mcp` + : undefined, + betterAuthTrustedOrigins: betterAuthTrustedOrigins.length > 0 ? 
betterAuthTrustedOrigins : corsOrigins, + devMode, + github: { + clientId: optionalString(parsed.GITHUB_CLIENT_ID), + clientSecret: optionalString(parsed.GITHUB_CLIENT_SECRET), + }, + githubConnectorApp: { + appId: optionalString(parsed.GITHUB_CONNECTOR_APP_ID), + clientId: optionalString(parsed.GITHUB_CONNECTOR_APP_CLIENT_ID), + clientSecret: optionalString(parsed.GITHUB_CONNECTOR_APP_CLIENT_SECRET), + privateKey: optionalString(parsed.GITHUB_CONNECTOR_APP_PRIVATE_KEY), + webhookSecret: optionalString(parsed.GITHUB_CONNECTOR_APP_WEBHOOK_SECRET), + }, + google: { + clientId: optionalString(parsed.GOOGLE_CLIENT_ID), + clientSecret: optionalString(parsed.GOOGLE_CLIENT_SECRET), + }, + loops: { + apiKey: optionalString(parsed.LOOPS_API_KEY), + transactionalIdDenVerifyEmail: optionalString(parsed.LOOPS_TRANSACTIONAL_ID_DEN_VERIFY_EMAIL), + transactionalIdDenOrgInviteEmail: optionalString(parsed.LOOPS_TRANSACTIONAL_ID_DEN_ORG_INVITE_EMAIL), + }, + port, + workerProxyPort: Number(parsed.WORKER_PROXY_PORT ?? "8789"), + corsOrigins, + provisionerMode: parsed.PROVISIONER_MODE ?? "daytona", + workerUrlTemplate: parsed.WORKER_URL_TEMPLATE, + workerActivityBaseUrl: + optionalString(parsed.WORKER_ACTIVITY_BASE_URL) ?? + parsed.BETTER_AUTH_URL.trim().replace(/\/+$/, ""), + render: { + apiBase: parsed.RENDER_API_BASE ?? "https://api.render.com/v1", + apiKey: parsed.RENDER_API_KEY, + ownerId: parsed.RENDER_OWNER_ID, + workerRepo: + parsed.RENDER_WORKER_REPO ?? "https://github.com/different-ai/openwork", + workerBranch: parsed.RENDER_WORKER_BRANCH ?? "dev", + workerRootDir: + parsed.RENDER_WORKER_ROOT_DIR ?? "ee/apps/den-worker-runtime", + workerPlan: parsed.RENDER_WORKER_PLAN ?? "standard", + workerRegion: parsed.RENDER_WORKER_REGION ?? "oregon", + workerOpenworkVersion: parsed.RENDER_WORKER_OPENWORK_VERSION, + workerNamePrefix: parsed.RENDER_WORKER_NAME_PREFIX ?? 
"den-worker", + workerPublicDomainSuffix: parsed.RENDER_WORKER_PUBLIC_DOMAIN_SUFFIX, + customDomainReadyTimeoutMs: Number( + parsed.RENDER_CUSTOM_DOMAIN_READY_TIMEOUT_MS ?? "240000", + ), + provisionTimeoutMs: Number(parsed.RENDER_PROVISION_TIMEOUT_MS ?? "900000"), + healthcheckTimeoutMs: Number( + parsed.RENDER_HEALTHCHECK_TIMEOUT_MS ?? "180000", + ), + pollIntervalMs: Number(parsed.RENDER_POLL_INTERVAL_MS ?? "5000"), + }, + vercel: { + apiBase: parsed.VERCEL_API_BASE ?? "https://api.vercel.com", + token: parsed.VERCEL_TOKEN, + teamId: parsed.VERCEL_TEAM_ID, + teamSlug: parsed.VERCEL_TEAM_SLUG, + dnsDomain: parsed.VERCEL_DNS_DOMAIN, + }, + polar: { + featureGateEnabled: polarFeatureGateEnabled, + apiBase: parsed.POLAR_API_BASE ?? "https://api.polar.sh", + accessToken: parsed.POLAR_ACCESS_TOKEN, + productId: parsed.POLAR_PRODUCT_ID, + benefitId: parsed.POLAR_BENEFIT_ID, + successUrl: parsed.POLAR_SUCCESS_URL, + returnUrl: parsed.POLAR_RETURN_URL, + }, + daytona: { + envPath: optionalString(parsed.OPENWORK_DAYTONA_ENV_PATH), + apiUrl: optionalString(parsed.DAYTONA_API_URL) ?? "https://app.daytona.io/api", + apiKey: optionalString(parsed.DAYTONA_API_KEY), + target: optionalString(parsed.DAYTONA_TARGET), + snapshot: optionalString(parsed.DAYTONA_SNAPSHOT), + image: optionalString(parsed.DAYTONA_SANDBOX_IMAGE) ?? "node:20-bookworm", + resources: { + cpu: Number(parsed.DAYTONA_SANDBOX_CPU ?? "2"), + memory: Number(parsed.DAYTONA_SANDBOX_MEMORY ?? "4"), + disk: Number(parsed.DAYTONA_SANDBOX_DISK ?? "8"), + }, + public: daytonaSandboxPublic, + autoStopInterval: Number(parsed.DAYTONA_SANDBOX_AUTO_STOP_INTERVAL ?? "0"), + autoArchiveInterval: Number( + parsed.DAYTONA_SANDBOX_AUTO_ARCHIVE_INTERVAL ?? "10080", + ), + autoDeleteInterval: Number( + parsed.DAYTONA_SANDBOX_AUTO_DELETE_INTERVAL ?? "-1", + ), + signedPreviewExpiresSeconds: Number( + parsed.DAYTONA_SIGNED_PREVIEW_EXPIRES_SECONDS ?? 
"86400", + ), + workerProxyBaseUrl: + optionalString(parsed.DAYTONA_WORKER_PROXY_BASE_URL) ?? "https://workers.den.openworklabs", + sandboxNamePrefix: + optionalString(parsed.DAYTONA_SANDBOX_NAME_PREFIX) ?? "den-daytona-worker", + sharedVolumeName: + optionalString(parsed.DAYTONA_SHARED_VOLUME_NAME) ?? + optionalString(parsed.DAYTONA_VOLUME_NAME_PREFIX) ?? + "den-daytona-workers", + workspaceMountPath: + optionalString(parsed.DAYTONA_WORKSPACE_MOUNT_PATH) ?? "/workspace", + dataMountPath: + optionalString(parsed.DAYTONA_DATA_MOUNT_PATH) ?? "/persist/openwork", + runtimeWorkspacePath: + optionalString(parsed.DAYTONA_RUNTIME_WORKSPACE_PATH) ?? + "/tmp/openwork-workspace", + runtimeDataPath: + optionalString(parsed.DAYTONA_RUNTIME_DATA_PATH) ?? "/tmp/openwork-data", + sidecarDir: + optionalString(parsed.DAYTONA_SIDECAR_DIR) ?? "/tmp/openwork-sidecars", + openworkPort: Number(parsed.DAYTONA_OPENWORK_PORT ?? "8787"), + opencodePort: Number(parsed.DAYTONA_OPENCODE_PORT ?? "4096"), + createTimeoutSeconds: Number(parsed.DAYTONA_CREATE_TIMEOUT_SECONDS ?? "300"), + deleteTimeoutSeconds: Number(parsed.DAYTONA_DELETE_TIMEOUT_SECONDS ?? "120"), + healthcheckTimeoutMs: Number( + parsed.DAYTONA_HEALTHCHECK_TIMEOUT_MS ?? 
"300000", + ), + pollIntervalMs: DEN_WORKER_POLL_INTERVAL_MS, + }, +} diff --git a/ee/apps/den-api/src/generated/app-version.ts b/ee/apps/den-api/src/generated/app-version.ts new file mode 100644 index 0000000000..04fd0ca543 --- /dev/null +++ b/ee/apps/den-api/src/generated/app-version.ts @@ -0,0 +1 @@ +export const BUILD_LATEST_APP_VERSION = "0.11.212" as const diff --git a/ee/apps/den-api/src/index.ts b/ee/apps/den-api/src/index.ts new file mode 100644 index 0000000000..1bd62f6388 --- /dev/null +++ b/ee/apps/den-api/src/index.ts @@ -0,0 +1,3 @@ +import app from "./app.js" + +export default app diff --git a/ee/apps/den-api/src/llm/models-dev.ts b/ee/apps/den-api/src/llm/models-dev.ts new file mode 100644 index 0000000000..f2eb93d864 --- /dev/null +++ b/ee/apps/den-api/src/llm/models-dev.ts @@ -0,0 +1,143 @@ +const MODELS_DEV_API_URL = "https://models.dev/api.json" +const MODELS_DEV_CACHE_TTL_MS = 1000 * 60 * 10 + +type JsonRecord = Record + +export type ModelsDevProviderSummary = { + id: string + name: string + npm: string | null + env: string[] + doc: string | null + api: string | null + modelCount: number +} + +export type ModelsDevModel = { + id: string + name: string + config: JsonRecord +} + +export type ModelsDevProvider = { + id: string + name: string + npm: string | null + env: string[] + doc: string | null + api: string | null + config: JsonRecord + models: ModelsDevModel[] +} + +let modelsDevCache: + | { + expiresAt: number + providers: ModelsDevProvider[] + providersById: Map + } + | null = null + +function isRecord(value: unknown): value is JsonRecord { + return typeof value === "object" && value !== null && !Array.isArray(value) +} + +function asString(value: unknown): string | null { + return typeof value === "string" && value.trim().length > 0 ? value : null +} + +function asStringList(value: unknown): string[] { + return Array.isArray(value) + ? 
value.filter((entry): entry is string => typeof entry === "string" && entry.trim().length > 0) + : [] +} + +async function loadModelsDevCatalog() { + if (modelsDevCache && modelsDevCache.expiresAt > Date.now()) { + return modelsDevCache + } + + const response = await fetch(MODELS_DEV_API_URL, { + headers: { + Accept: "application/json", + "User-Agent": "OpenWork Den API", + }, + }) + + if (!response.ok) { + throw new Error(`models.dev returned ${response.status}`) + } + + const payload = await response.json() + if (!isRecord(payload)) { + throw new Error("models.dev returned an invalid payload") + } + + const providers = Object.entries(payload) + .map(([providerKey, rawProvider]) => { + if (!isRecord(rawProvider)) { + return null + } + + const providerId = asString(rawProvider.id) ?? providerKey + const name = asString(rawProvider.name) ?? providerId + const modelsRecord = isRecord(rawProvider.models) ? rawProvider.models : {} + const { models: _models, ...providerConfig } = rawProvider + const models = Object.entries(modelsRecord) + .map(([modelKey, rawModel]) => { + if (!isRecord(rawModel)) { + return null + } + + const modelId = asString(rawModel.id) ?? modelKey + const modelName = asString(rawModel.name) ?? 
modelId + return { + id: modelId, + name: modelName, + config: rawModel, + } satisfies ModelsDevModel + }) + .filter((entry): entry is ModelsDevModel => entry !== null) + .sort((left, right) => left.name.localeCompare(right.name)) + + return { + id: providerId, + name, + npm: asString(rawProvider.npm), + env: asStringList(rawProvider.env), + doc: asString(rawProvider.doc), + api: asString(rawProvider.api), + config: providerConfig, + models, + } satisfies ModelsDevProvider + }) + .filter((entry): entry is ModelsDevProvider => entry !== null) + .sort((left, right) => left.name.localeCompare(right.name)) + + const nextCache = { + expiresAt: Date.now() + MODELS_DEV_CACHE_TTL_MS, + providers, + providersById: new Map(providers.map((provider) => [provider.id, provider])), + } + + modelsDevCache = nextCache + return nextCache +} + +export async function listModelsDevProviders(): Promise { + const catalog = await loadModelsDevCatalog() + return catalog.providers.map((provider) => ({ + id: provider.id, + name: provider.name, + npm: provider.npm, + env: provider.env, + doc: provider.doc, + api: provider.api, + modelCount: provider.models.length, + })) +} + +export async function getModelsDevProvider(providerId: string): Promise { + const catalog = await loadModelsDevCatalog() + return catalog.providersById.get(providerId) ?? 
null
+}
diff --git a/ee/apps/den-api/src/load-env.ts b/ee/apps/den-api/src/load-env.ts
new file mode 100644
index 0000000000..3e3830565f
--- /dev/null
+++ b/ee/apps/den-api/src/load-env.ts
@@ -0,0 +1,15 @@
+import { existsSync } from "node:fs"
+import path from "node:path"
+import { fileURLToPath } from "node:url"
+import dotenv from "dotenv"
+
+// Resolve the service root (one level above src/) so env files are found
+// relative to the package, not the process working directory.
+const srcDir = path.dirname(fileURLToPath(import.meta.url))
+const serviceDir = path.resolve(srcDir, "..")
+
+// Load .env.local first, then .env; `override: false` keeps values already
+// present in the process environment (and from earlier files).
+for (const filePath of [path.join(serviceDir, ".env.local"), path.join(serviceDir, ".env")]) {
+  if (existsSync(filePath)) {
+    dotenv.config({ path: filePath, override: false })
+  }
+}
+
+// Finally pick up a .env from the current working directory, again without
+// overriding anything already set.
+dotenv.config({ override: false })
diff --git a/ee/apps/den-api/src/loops.ts b/ee/apps/den-api/src/loops.ts
new file mode 100644
index 0000000000..626facb21b
--- /dev/null
+++ b/ee/apps/den-api/src/loops.ts
@@ -0,0 +1,121 @@
+import { env } from "./env.js"
+
+const LOOPS_CONTACTS_UPDATE_URL = "https://app.loops.so/api/v1/contacts/update"
+const LOOPS_EVENTS_SEND_URL = "https://app.loops.so/api/v1/events/send"
+const DEN_SIGNUP_SOURCE = "signup"
+const SUBSCRIBED_TO_DEN_EVENT = "subscribedToDen"
+
+// Splits a display name into Loops' firstName/lastName fields.
+// lastName is undefined (not empty) when the name has a single word.
+function splitName(name: string) {
+  const parts = name.trim().split(/\s+/).filter(Boolean)
+  return {
+    firstName: parts[0] ?? "",
+    lastName: parts.slice(1).join(" ") || undefined,
+  }
+}
+
+// Upserts a signup contact into Loops. Best-effort: a missing API key or any
+// failure only logs a warning and never throws.
+export async function syncDenSignupContact(input: {
+  email: string
+  name?: string | null
+}) {
+  const apiKey = env.loops.apiKey
+  if (!apiKey) {
+    return
+  }
+
+  const email = input.email.trim()
+  if (!email) {
+    return
+  }
+
+  const name = input.name?.trim()
+  const { firstName, lastName } = name ?
splitName(name) : { firstName: "", lastName: undefined } + + try { + const response = await fetch(LOOPS_CONTACTS_UPDATE_URL, { + method: "PUT", + headers: { + Authorization: `Bearer ${apiKey}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + email, + firstName: firstName || undefined, + lastName, + source: DEN_SIGNUP_SOURCE, + }), + }) + + if (response.ok) { + return + } + + let detail = `status ${response.status}` + try { + const payload = (await response.json()) as { message?: string } + if (payload.message?.trim()) { + detail = payload.message + } + } catch { + // Ignore non-JSON error bodies from Loops. + } + + console.warn(`[auth] failed to sync Loops contact for ${email}: ${detail}`) + } catch (error) { + const message = error instanceof Error ? error.message : "Unknown error" + console.warn(`[auth] failed to sync Loops contact for ${email}: ${message}`) + } +} + +export async function sendSubscribedToDenEvent(input: { + email: string + name?: string | null +}) { + const apiKey = env.loops.apiKey + if (!apiKey) { + return + } + + const email = input.email.trim() + if (!email) { + return + } + + const name = input.name?.trim() + const { firstName, lastName } = name ? splitName(name) : { firstName: "", lastName: undefined } + + try { + const response = await fetch(LOOPS_EVENTS_SEND_URL, { + method: "POST", + headers: { + Authorization: `Bearer ${apiKey}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + email, + eventName: SUBSCRIBED_TO_DEN_EVENT, + firstName: firstName || undefined, + lastName, + eventProperties: { + subscribedAt: new Date().toISOString(), + }, + }), + }) + + if (!response.ok) { + let detail = `status ${response.status}` + try { + const payload = (await response.json()) as { message?: string } + if (payload.message?.trim()) { + detail = payload.message + } + } catch { + // Ignore non-JSON error bodies from Loops. 
+ } + + console.warn(`[billing] failed to send Loops event ${SUBSCRIBED_TO_DEN_EVENT} for ${email}: ${detail}`) + } + } catch (error) { + const message = error instanceof Error ? error.message : "Unknown error" + console.warn(`[billing] failed to send Loops event ${SUBSCRIBED_TO_DEN_EVENT} for ${email}: ${message}`) + } +} diff --git a/ee/apps/den-api/src/mcp/README.md b/ee/apps/den-api/src/mcp/README.md new file mode 100644 index 0000000000..0999c3fdd1 --- /dev/null +++ b/ee/apps/den-api/src/mcp/README.md @@ -0,0 +1,72 @@ +# Den API MCP Exposure Policy + +The MCP catalog is generated from `openapi.json`, then filtered by `policy.ts` before tools are registered. + +## Allowed Tags + +Every tagged Den API product surface is allowed unless it is listed under blocked tags or blocked operation IDs: + +- `API Keys` +- `Config Objects` +- `Connectors` +- `GitHub` +- `Invitations` +- `LLM Providers` +- `Marketplaces` +- `Members` +- `Organizations` +- `Plugins` +- `Roles` +- `Skill Hubs` +- `Skills` +- `Teams` +- `Users` +- `Worker Activity` +- `Worker Runtime` +- `Workers` + +## Blocked Tags + +These tags are intentionally excluded from MCP: + +- `Admin`: internal administrative controls should not be broadly exposed as agent tools. +- `Authentication`: OAuth/session plumbing is used to authorize MCP, not exposed through MCP. +- `System`: health, docs, and other service metadata are not product actions. +- `Webhooks`: external webhook ingress routes require provider signatures and should not be invoked by agents. + +## Blocked Paths + +Routes are blocked if their path: + +- starts with `/api/auth` +- contains `/admin` +- contains `/webhooks` + +This catches auth/admin/webhook routes even if they are untagged or incorrectly tagged. + +## Blocked Operation IDs + +These individual operations are blocked even though their tags may otherwise be allowed: + +- `postApiKeys`: creating API keys returns credentials and should stay behind explicit UI/API flows. 
+- `postV1ApiKeys`: generated OpenAPI ID for creating API keys. +- `deleteApiKeysByApiKeyId`: destructive credential revocation should stay behind explicit UI/API flows. +- `deleteV1ApiKeysByApiKeyId`: generated OpenAPI ID for deleting API keys, if present. +- `deleteOrg`: defensive block for organization deletion if a route is added. +- `deleteV1Org`: defensive block for organization deletion if a route is added. +- `deleteV1OrgsByOrgId`: defensive block for organization deletion if a route is added. +- `postWorkersByWorkerIdTokens`: worker token minting returns credentials and should stay behind explicit UI/API flows. +- `postV1WorkersByWorkerIdTokens`: generated OpenAPI ID for worker token minting. + +## Untagged Operations + +Untagged operations are excluded by default. Today these are OAuth/MCP discovery and registration routes, for example: + +- `/.well-known/oauth-authorization-server` +- `/.well-known/openid-configuration` +- `/.well-known/oauth-protected-resource` +- `/api/auth/oauth2/authorize` +- `/api/auth/oauth2/register` +- `/register` + +They are required for OAuth/MCP setup, but should not appear as callable MCP tools. 
diff --git a/ee/apps/den-api/src/mcp/auth.ts b/ee/apps/den-api/src/mcp/auth.ts new file mode 100644 index 0000000000..fffd596f15 --- /dev/null +++ b/ee/apps/den-api/src/mcp/auth.ts @@ -0,0 +1,167 @@ +import * as crypto from "node:crypto" +import { eq } from "@openwork-ee/den-db/drizzle" +import { OAuthAccessTokenTable } from "@openwork-ee/den-db/schema" +import { verifyJwsAccessToken } from "better-auth/oauth2" +import { + auth, + DEN_MCP_OPAQUE_ACCESS_TOKEN_PREFIX, + DEN_MCP_ORG_ID_CLAIM, + DEN_MCP_RESOURCE, + DEN_MCP_RESOURCE_CLAIM, + DEN_MCP_RESOURCES, + DEN_MCP_TOKEN_USE_CLAIM, +} from "../auth.js" +import { db } from "../db.js" +import { env } from "../env.js" + +export type McpPrincipal = { + userId: string + organizationId: string + scopes: Set + payload: Record +} + +export function getMcpResourceUrl(request: Request) { + const url = new URL(request.url) + const requestResource = `${url.origin}/mcp` + return DEN_MCP_RESOURCES.includes(requestResource) ? requestResource : DEN_MCP_RESOURCE +} + +function readBearerToken(headers: Headers) { + const authorization = headers.get("authorization")?.trim() ?? "" + const match = authorization.match(/^Bearer\s+(.+)$/i) + return match?.[1]?.trim() || null +} + +function hashStoredToken(token: string) { + return crypto.createHash("sha256").update(token).digest("base64url") +} + +function readStoredScopes(scopes: string) { + try { + const parsed = JSON.parse(scopes) as unknown + if (Array.isArray(parsed)) return parsed.filter((entry): entry is string => typeof entry === "string") + } catch { + // Older rows or custom stores may keep scopes as a space-delimited string. + } + return scopes.split(/\s+/).filter(Boolean) +} + +function readScopes(payload: Record) { + const scope = typeof payload.scope === "string" ? payload.scope : "" + const scopes = Array.isArray(payload.scopes) ? 
payload.scopes : [] + return new Set([ + ...scope.split(/\s+/).filter(Boolean), + ...scopes.filter((entry: unknown): entry is string => typeof entry === "string"), + ]) +} + +function readStringClaim(payload: Record, claim: string) { + const value = payload[claim] + return typeof value === "string" && value.trim() ? value.trim() : null +} + +async function getJwks() { + const response = await auth.handler(new Request(`${env.betterAuthUrl}/api/auth/jwks`)) + if (!response.ok) { + throw new Error("Unable to load auth JWKS") + } + return response.json() +} + +async function verifyJwtMcpToken(token: string) { + const payload = await verifyJwsAccessToken(token, { + jwksFetch: getJwks, + verifyOptions: { + issuer: `${env.betterAuthUrl}/api/auth`, + audience: DEN_MCP_RESOURCES, + }, + }) + return payload as Record +} + +async function verifyOpaqueMcpToken(token: string) { + if (!token.startsWith(DEN_MCP_OPAQUE_ACCESS_TOKEN_PREFIX)) { + return null + } + + const storedToken = hashStoredToken(token.slice(DEN_MCP_OPAQUE_ACCESS_TOKEN_PREFIX.length)) + const [accessToken] = await db + .select() + .from(OAuthAccessTokenTable) + .where(eq(OAuthAccessTokenTable.token, storedToken)) + .limit(1) + + if (!accessToken || accessToken.expiresAt <= new Date()) { + return null + } + + const storedScopes = readStoredScopes(accessToken.scopes) + return { + sub: accessToken.userId, + scope: storedScopes.join(" "), + client_id: accessToken.clientId, + exp: Math.floor(accessToken.expiresAt.getTime() / 1000), + iat: Math.floor(accessToken.createdAt.getTime() / 1000), + [DEN_MCP_TOKEN_USE_CLAIM]: "mcp", + [DEN_MCP_RESOURCE_CLAIM]: DEN_MCP_RESOURCE, + ...(accessToken.referenceId ? 
{ [DEN_MCP_ORG_ID_CLAIM]: accessToken.referenceId } : {}), + } +} + +export async function verifyMcpRequest(headers: Headers, resourceUrl = DEN_MCP_RESOURCE): Promise { + const token = readBearerToken(headers) + if (!token) { + return new Response(JSON.stringify({ error: "missing_mcp_token" }), { + status: 401, + headers: { + "content-type": "application/json", + "www-authenticate": `Bearer resource_metadata="${resourceUrl}/.well-known/oauth-protected-resource"`, + }, + }) + } + + const payload = token.includes(".") + ? await verifyJwtMcpToken(token).catch(() => null) + : await verifyOpaqueMcpToken(token) + if (!payload) { + return new Response(JSON.stringify({ error: "invalid_mcp_token" }), { + status: 401, + headers: { "content-type": "application/json" }, + }) + } + + const scopes = readScopes(payload) + if (!scopes.has("mcp:read") && !scopes.has("mcp:write")) { + return new Response(JSON.stringify({ error: "insufficient_mcp_scope" }), { + status: 403, + headers: { "content-type": "application/json" }, + }) + } + + if (readStringClaim(payload, DEN_MCP_TOKEN_USE_CLAIM) !== "mcp") { + return new Response(JSON.stringify({ error: "wrong_token_use" }), { + status: 403, + headers: { "content-type": "application/json" }, + }) + } + + const resource = readStringClaim(payload, DEN_MCP_RESOURCE_CLAIM) + if (resource && !DEN_MCP_RESOURCES.includes(resource)) { + return new Response(JSON.stringify({ error: "wrong_mcp_resource" }), { + status: 403, + headers: { "content-type": "application/json" }, + }) + } + + const userId = typeof payload.sub === "string" ? 
payload.sub : null + const organizationId = readStringClaim(payload, DEN_MCP_ORG_ID_CLAIM) + if (!userId || !organizationId) { + return new Response(JSON.stringify({ error: "missing_mcp_principal" }), { + status: 403, + headers: { "content-type": "application/json" }, + }) + } + + return { userId, organizationId, scopes, payload } +} diff --git a/ee/apps/den-api/src/mcp/catalog.ts b/ee/apps/den-api/src/mcp/catalog.ts new file mode 100644 index 0000000000..c6a439ad94 --- /dev/null +++ b/ee/apps/den-api/src/mcp/catalog.ts @@ -0,0 +1,220 @@ +import type { Hono } from "hono" +import { z } from "zod" +import { isMcpOperationAllowed, type OpenApiOperation } from "./policy.js" + +const METHODS = new Set(["get", "post", "put", "patch", "delete"]) + +type OpenApiDocument = { + paths?: Record> +} + +type OpenApiParameter = { + name?: unknown + in?: unknown + required?: unknown + description?: unknown + schema?: { + type?: unknown + format?: unknown + enum?: unknown[] + default?: unknown + } +} + +type OpenApiRequestBody = { + required?: unknown + content?: unknown +} + +type McpInputSchema = z.ZodObject> + +export type McpToolOperation = { + name: string + method: string + path: string + operation: OpenApiOperation + inputSchema: McpInputSchema +} + +function isOpenApiParameter(value: unknown): value is OpenApiParameter { + return typeof value === "object" && value !== null +} + +function getParameters(operation: OpenApiOperation, location: "path" | "query") { + return (operation.parameters ?? 
[]) + .filter(isOpenApiParameter) + .filter((parameter) => parameter.in === location && typeof parameter.name === "string" && parameter.name.length > 0) +} + +function schemaForParameter(parameter: OpenApiParameter) { + const schema = parameter.schema + const type = schema?.type + const enumValues = schema?.enum + + let valueSchema: z.ZodTypeAny + if (Array.isArray(enumValues) && enumValues.length > 0 && enumValues.every((value): value is string => typeof value === "string")) { + valueSchema = z.enum(enumValues as [string, ...string[]]) + } else if (type === "number" || type === "integer") { + valueSchema = z.number() + } else if (type === "boolean") { + valueSchema = z.boolean() + } else { + valueSchema = z.string() + } + + if (typeof parameter.description === "string" && parameter.description.trim().length > 0) { + valueSchema = valueSchema.describe(parameter.description) + } + + return valueSchema +} + +function objectForParameters(parameters: OpenApiParameter[], requiredByDefault: boolean) { + const shape: Record = {} + + for (const parameter of parameters) { + const name = parameter.name as string + const required = requiredByDefault || parameter.required === true + const schema = schemaForParameter(parameter) + shape[name] = required ? schema : schema.optional() + } + + return z.object(shape).strict() +} + +function pathParameterNamesFromTemplate(path: string) { + return [...path.matchAll(/\{([^}]+)\}/g)].map((match) => match[1]).filter(Boolean) +} + +function buildPathSchema(path: string, operation: OpenApiOperation) { + const documentedParameters = getParameters(operation, "path") + const byName = new Map(documentedParameters.map((parameter) => [parameter.name as string, parameter])) + const parameters = pathParameterNamesFromTemplate(path).map((name) => byName.get(name) ?? { name, in: "path", required: true }) + + return parameters.length > 0 ? 
objectForParameters(parameters, true) : undefined +} + +function buildQuerySchema(operation: OpenApiOperation) { + const parameters = getParameters(operation, "query") + return parameters.length > 0 ? objectForParameters(parameters, false) : undefined +} + +function hasJsonRequestBody(operation: OpenApiOperation) { + const requestBody = getRequestBody(operation) + const content = requestBody?.content + return typeof content === "object" && content !== null && "application/json" in content +} + +function getRequestBody(operation: OpenApiOperation): OpenApiRequestBody | null { + const requestBody = operation.requestBody + return typeof requestBody === "object" && requestBody !== null ? requestBody : null +} + +function buildInputSchema(path: string, operation: OpenApiOperation) { + const shape: Record = {} + const pathSchema = buildPathSchema(path, operation) + const querySchema = buildQuerySchema(operation) + + if (pathSchema) { + shape.path = pathSchema.describe("URL path parameters. Put values for route placeholders here, not in body.") + } + + if (querySchema) { + shape.query = querySchema.describe("URL query string parameters.").optional() + } + + if (hasJsonRequestBody(operation)) { + const bodySchema = z.unknown().describe("JSON request body fields for this operation.") + shape.body = getRequestBody(operation)?.required === true ? 
bodySchema : bodySchema.optional() + } + + return z.object(shape).strict() +} + +function buildInputGuidance(input: McpToolOperation) { + const sections: string[] = [] + const pathNames = pathParameterNamesFromTemplate(input.path) + const queryNames = getParameters(input.operation, "query").map((parameter) => parameter.name as string) + + if (pathNames.length > 0) { + sections.push(`Path parameters: put ${pathNames.map((name) => `\`${name}\``).join(", ")} under \`path\`.`) + } + + if (queryNames.length > 0) { + sections.push(`Query parameters: put ${queryNames.map((name) => `\`${name}\``).join(", ")} under \`query\`.`) + } + + if (hasJsonRequestBody(input.operation)) { + sections.push("Request body: put JSON body fields under `body`. Do not wrap them in `requestBody`.") + } + + if (sections.length === 0) { + return null + } + + return [ + "MCP input shape:", + ...sections, + "Do not send OpenAPI wrapper keys like `parameters` or `requestBody`.", + ].join("\n") +} + +export async function loadOpenApiDocument(app: Hono, env: unknown): Promise { + const response = await app.fetch(new Request("http://den-api.local/openapi.json"), env) + if (!response.ok) { + throw new Error(`Unable to load Den OpenAPI document: ${response.status}`) + } + return response.json() as Promise +} + +function buildDescription(input: McpToolOperation) { + const parts = [ + input.operation.summary, + input.operation.description, + `${input.method.toUpperCase()} ${input.path}`, + buildInputGuidance(input), + ].filter((part): part is string => typeof part === "string" && part.trim().length > 0) + + return parts.join("\n\n") +} + +export function getToolDescription(operation: McpToolOperation) { + return buildDescription(operation) +} + +export function buildMcpCatalog(document: OpenApiDocument): McpToolOperation[] { + const operations: McpToolOperation[] = [] + const names = new Set() + + for (const [path, pathItem] of Object.entries(document.paths ?? 
{})) { + for (const [method, operation] of Object.entries(pathItem)) { + if (!METHODS.has(method.toLowerCase())) { + continue + } + + if (!isMcpOperationAllowed({ method, path, operation })) { + continue + } + + const name = operation.operationId + if (!name) { + continue + } + + if (names.has(name)) { + throw new Error(`Duplicate MCP tool operationId: ${name}`) + } + names.add(name) + + operations.push({ + name, + method: method.toUpperCase(), + path, + operation, + inputSchema: buildInputSchema(path, operation), + }) + } + } + + return operations.sort((a, b) => a.name.localeCompare(b.name)) +} diff --git a/ee/apps/den-api/src/mcp/index.ts b/ee/apps/den-api/src/mcp/index.ts new file mode 100644 index 0000000000..834b85c655 --- /dev/null +++ b/ee/apps/den-api/src/mcp/index.ts @@ -0,0 +1,59 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js" +import { StreamableHTTPTransport } from "@hono/mcp" +import type { Hono } from "hono" +import { getMcpResourceUrl, verifyMcpRequest } from "./auth.js" +import { buildMcpCatalog, getToolDescription, loadOpenApiDocument } from "./catalog.js" +import { invokeMcpOperation } from "./invoke.js" + +function protectedResourceMetadata(request: Request) { + const resource = getMcpResourceUrl(request) + return { + resource, + authorization_servers: [resource.replace(/\/mcp$/, "/api/auth")], + scopes_supported: ["mcp:read", "mcp:write"], + bearer_methods_supported: ["header"], + } +} + +export function registerMcpRoutes }>(app: Hono) { + app.get("/.well-known/oauth-protected-resource", (c) => c.json(protectedResourceMetadata(c.req.raw))) + app.get("/.well-known/oauth-protected-resource/mcp", (c) => c.json(protectedResourceMetadata(c.req.raw))) + app.get("/mcp/.well-known/oauth-protected-resource", (c) => c.json(protectedResourceMetadata(c.req.raw))) + + app.all("/mcp", async (c) => { + const principal = await verifyMcpRequest(c.req.raw.headers, getMcpResourceUrl(c.req.raw)) + if (principal instanceof Response) { + 
return principal + } + + const document = await loadOpenApiDocument(app as unknown as Hono, c.env) + const catalog = buildMcpCatalog(document) + const server = new McpServer({ + name: "openwork-den-api", + version: "1.0.0", + }) + + for (const operation of catalog) { + server.registerTool( + operation.name, + { + title: operation.operation.summary ?? operation.name, + description: getToolDescription(operation), + inputSchema: operation.inputSchema, + }, + async (toolInput) => invokeMcpOperation({ + app: app as unknown as Hono, + env: c.env, + operation, + principal, + toolInput, + }), + ) + } + + const transport = new StreamableHTTPTransport() + await server.connect(transport) + const response = await transport.handleRequest(c) + return response ?? new Response(null, { status: 204 }) + }) +} diff --git a/ee/apps/den-api/src/mcp/invoke.ts b/ee/apps/den-api/src/mcp/invoke.ts new file mode 100644 index 0000000000..276f8a93c2 --- /dev/null +++ b/ee/apps/den-api/src/mcp/invoke.ts @@ -0,0 +1,103 @@ +import type { Hono } from "hono" +import { createInternalMcpPrincipalHeader } from "../session.js" +import type { McpPrincipal } from "./auth.js" +import type { McpToolOperation } from "./catalog.js" +import { requiredScopeForMethod } from "./policy.js" + +type ToolInput = { + path?: Record + query?: Record + body?: unknown +} + +function encodeQueryValue(value: unknown) { + if (value === null || value === undefined) { + return null + } + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + return String(value) + } + return JSON.stringify(value) +} + +function buildPath(template: string, values: Record) { + return template.replace(/\{([^}]+)\}/g, (_match, key: string) => { + const value = values[key] + if (value === null || value === undefined) { + throw new Error(`Missing path parameter: ${key}`) + } + return encodeURIComponent(String(value)) + }) +} + +function buildInternalRequest(input: { + operation: McpToolOperation + toolInput: 
ToolInput + principal: McpPrincipal +}) { + const path = buildPath(input.operation.path, input.toolInput.path ?? {}) + const url = new URL(path, "http://den-api.local") + + for (const [key, value] of Object.entries(input.toolInput.query ?? {})) { + const encoded = encodeQueryValue(value) + if (encoded !== null) { + url.searchParams.set(key, encoded) + } + } + + const headers = new Headers({ + accept: "application/json", + "x-den-internal-mcp-principal": createInternalMcpPrincipalHeader({ + userId: input.principal.userId, + organizationId: input.principal.organizationId, + }), + }) + + let body: BodyInit | undefined + if (input.operation.method !== "GET" && input.operation.method !== "HEAD" && input.toolInput.body !== undefined) { + headers.set("content-type", "application/json") + body = JSON.stringify(input.toolInput.body) + } + + return new Request(url, { + method: input.operation.method, + headers, + body, + }) +} + +export async function invokeMcpOperation(input: { + app: Hono + env: unknown + operation: McpToolOperation + principal: McpPrincipal + toolInput: ToolInput +}) { + const requiredScope = requiredScopeForMethod(input.operation.method) + if (!input.principal.scopes.has(requiredScope)) { + return { + isError: true, + content: [{ type: "text" as const, text: JSON.stringify({ error: "insufficient_mcp_scope", requiredScope }) }], + } + } + + let request: Request + try { + request = buildInternalRequest(input) + } catch (error) { + return { + isError: true, + content: [{ type: "text" as const, text: JSON.stringify({ error: "invalid_tool_input", message: error instanceof Error ? error.message : String(error) }) }], + } + } + + const response = await input.app.fetch(request, input.env) + const contentType = response.headers.get("content-type") ?? "" + const payload = contentType.includes("application/json") ? await response.json() : await response.text() + const text = typeof payload === "string" ? 
payload : JSON.stringify(payload, null, 2) + + return { + isError: response.status >= 400, + content: [{ type: "text" as const, text }], + } +} diff --git a/ee/apps/den-api/src/mcp/policy.ts b/ee/apps/den-api/src/mcp/policy.ts new file mode 100644 index 0000000000..605de9631d --- /dev/null +++ b/ee/apps/den-api/src/mcp/policy.ts @@ -0,0 +1,80 @@ +const BLOCKED_TAGS = new Set(["Admin", "Authentication", "System", "Webhooks"]) +const SAFE_INCLUDED_TAGS = new Set([ + "Users", + "Organizations", + "Invitations", + "API Keys", + "Members", + "Roles", + "Teams", + "Templates", + "LLM Providers", + "Skills", + "Skill Hubs", + "Workers", + "Worker Runtime", + "Worker Activity", + "Config Objects", + "Plugins", + "Marketplaces", + "Connectors", + "GitHub", +]) + +const BLOCKED_OPERATION_IDS = new Set([ + "postApiKeys", + "postV1ApiKeys", + "deleteApiKeysByApiKeyId", + "deleteV1ApiKeysByApiKeyId", + "deleteOrg", + "deleteV1Org", + "deleteV1OrgsByOrgId", + "postWorkersByWorkerIdTokens", + "postV1WorkersByWorkerIdTokens", +]) + +export type OpenApiOperation = { + operationId?: string + summary?: string + description?: string + tags?: string[] + parameters?: unknown[] + requestBody?: unknown + security?: unknown + [key: string]: unknown +} + +export function isMcpOperationAllowed(input: { + method: string + path: string + operation: OpenApiOperation +}) { + const explicit = input.operation["x-mcp"] + if (explicit === false || explicit === "false") { + return false + } + + const operationId = input.operation.operationId + if (!operationId || BLOCKED_OPERATION_IDS.has(operationId)) { + return false + } + + if (input.path.startsWith("/api/auth") || input.path.includes("/webhooks") || input.path.includes("/admin")) { + return false + } + + const tags = input.operation.tags ?? 
[] + if (tags.some((tag) => BLOCKED_TAGS.has(tag))) { + return false + } + + if (explicit === true || explicit === "true") { + return true + } + + return tags.some((tag) => SAFE_INCLUDED_TAGS.has(tag)) +} + +export function requiredScopeForMethod(method: string) { + return method.toUpperCase() === "GET" ? "mcp:read" : "mcp:write" +} diff --git a/ee/apps/den-api/src/middleware/README.md b/ee/apps/den-api/src/middleware/README.md new file mode 100644 index 0000000000..e6583199fd --- /dev/null +++ b/ee/apps/den-api/src/middleware/README.md @@ -0,0 +1,43 @@ +# Middleware + +This folder contains reusable Hono middleware that route areas can compose as needed. + +## Files + +- `index.ts`: public export surface for all shared middleware +- `admin.ts`: requires an authenticated allowlisted admin +- `current-user.ts`: requires an authenticated user +- `user-organizations.ts`: loads the orgs the current user belongs to +- `organization-context.ts`: loads org + current member context for `:orgSlug` routes +- `member-teams.ts`: loads the teams the current org member belongs to +- `validation.ts`: shared Hono Zod validator wrappers for JSON, query, and params + +## Available context + +- `c.get("user")`: current authenticated user +- `c.get("session")`: current Better Auth session +- `c.get("userOrganizations")`: orgs for the current user +- `c.get("activeOrganizationId")` +- `c.get("activeOrganizationSlug")` +- `c.get("organizationContext")`: org record, current member, members, invites, roles +- `c.get("memberTeams")`: teams for the current org member + +## Usage pattern + +Import from `src/middleware/index.ts`: + +```ts +import { + jsonValidator, + paramValidator, + requireUserMiddleware, + resolveOrganizationContextMiddleware, +} from "../../middleware/index.js" +``` + +Then compose only what a route needs. 
+ +## Rule of thumb + +- If a value is broadly useful across multiple route areas, put it here +- If a helper only exists for one route area, keep it in that route folder instead diff --git a/ee/apps/den-api/src/middleware/admin.ts b/ee/apps/den-api/src/middleware/admin.ts new file mode 100644 index 0000000000..2cdf2d17cd --- /dev/null +++ b/ee/apps/den-api/src/middleware/admin.ts @@ -0,0 +1,36 @@ +import { eq } from "@openwork-ee/den-db/drizzle" +import { AdminAllowlistTable } from "@openwork-ee/den-db/schema" +import type { MiddlewareHandler } from "hono" +import { ensureAdminAllowlistSeeded } from "../admin-allowlist.js" +import { db } from "../db.js" +import type { AuthContextVariables } from "../session.js" + +function normalizeEmail(value: string | null | undefined) { + return value?.trim().toLowerCase() ?? "" +} + +export const requireAdminMiddleware: MiddlewareHandler<{ Variables: AuthContextVariables }> = async (c, next) => { + const user = c.get("user") + if (!user?.id) { + return c.json({ error: "unauthorized" }, 401) as never + } + + const email = normalizeEmail(user.email) + if (!email) { + return c.json({ error: "admin_email_required" }, 403) as never + } + + await ensureAdminAllowlistSeeded() + + const allowed = await db + .select({ id: AdminAllowlistTable.id }) + .from(AdminAllowlistTable) + .where(eq(AdminAllowlistTable.email, email)) + .limit(1) + + if (allowed.length === 0) { + return c.json({ error: "forbidden" }, 403) as never + } + + await next() +} diff --git a/ee/apps/den-api/src/middleware/current-user.ts b/ee/apps/den-api/src/middleware/current-user.ts new file mode 100644 index 0000000000..51423635fe --- /dev/null +++ b/ee/apps/den-api/src/middleware/current-user.ts @@ -0,0 +1,10 @@ +import type { MiddlewareHandler } from "hono" +import type { AuthContextVariables } from "../session.js" + +export const requireUserMiddleware: MiddlewareHandler<{ Variables: AuthContextVariables }> = async (c, next) => { + if (!c.get("user")?.id) { + return 
c.json({ error: "unauthorized" }, 401) as never + } + + await next() +} diff --git a/ee/apps/den-api/src/middleware/index.ts b/ee/apps/den-api/src/middleware/index.ts new file mode 100644 index 0000000000..d59f176073 --- /dev/null +++ b/ee/apps/den-api/src/middleware/index.ts @@ -0,0 +1,6 @@ +export * from "./admin.js" +export * from "./current-user.js" +export * from "./user-organizations.js" +export * from "./organization-context.js" +export * from "./member-teams.js" +export * from "./validation.js" diff --git a/ee/apps/den-api/src/middleware/member-teams.ts b/ee/apps/den-api/src/middleware/member-teams.ts new file mode 100644 index 0000000000..d14b716ca9 --- /dev/null +++ b/ee/apps/den-api/src/middleware/member-teams.ts @@ -0,0 +1,25 @@ +import type { MiddlewareHandler } from "hono" +import { listTeamsForMember, type MemberTeamSummary } from "../orgs.js" +import type { AuthContextVariables } from "../session.js" +import type { OrganizationContextVariables } from "./organization-context.js" + +export type MemberTeamsContext = { + memberTeams: MemberTeamSummary[] +} + +export const resolveMemberTeamsMiddleware: MiddlewareHandler<{ + Variables: AuthContextVariables & Partial & Partial +}> = async (c, next) => { + const context = c.get("organizationContext") + if (!context) { + return c.json({ error: "organization_context_required" }, 500) as never + } + + const memberTeams = await listTeamsForMember({ + organizationId: context.organization.id, + memberId: context.currentMember.id, + }) + + c.set("memberTeams", memberTeams) + await next() +} diff --git a/ee/apps/den-api/src/middleware/organization-context.ts b/ee/apps/den-api/src/middleware/organization-context.ts new file mode 100644 index 0000000000..5415893050 --- /dev/null +++ b/ee/apps/den-api/src/middleware/organization-context.ts @@ -0,0 +1,91 @@ +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { MiddlewareHandler } from "hono" +import { getApiKeyScopedOrganizationId, 
isScopedApiKeyForOrganization } from "../api-keys.js" +import { getOrganizationContextForUser, resolveUserOrganizations, type OrganizationContext } from "../orgs.js" +import type { AuthContextVariables } from "../session.js" +import { getLegacyProxyOrganizationId, hydrateSessionActiveOrganization, shouldHydrateSessionActiveOrganization, type UserOrganizationsContext } from "./user-organizations.js" + +export type OrganizationContextVariables = { + organizationContext: OrganizationContext +} + +export const resolveOrganizationContextMiddleware: MiddlewareHandler<{ + Variables: AuthContextVariables & Partial & Partial +}> = async (c, next) => { + const user = c.get("user") + if (!user?.id) { + return c.json({ error: "unauthorized" }, 401) as never + } + + const apiKey = c.get("apiKey") + const apiKeyScopedOrganizationId = getApiKeyScopedOrganizationId(apiKey) + const legacyProxyOrganizationId = getLegacyProxyOrganizationId(c.req.raw.headers) + const scopedOrganizationId = apiKeyScopedOrganizationId ?? legacyProxyOrganizationId + + let organizationId = c.get("activeOrganizationId") ?? null + let organizationSlug = c.get("activeOrganizationSlug") ?? null + + if (!organizationId) { + const session = c.get("session") + const resolved = await resolveUserOrganizations({ + activeOrganizationId: scopedOrganizationId ?? session?.activeOrganizationId ?? null, + userId: normalizeDenTypeId("user", user.id), + }) + + const scopedOrgs = scopedOrganizationId + ? resolved.orgs.filter((org) => org.id === scopedOrganizationId) + : resolved.orgs + + organizationId = scopedOrganizationId ? scopedOrgs[0]?.id ?? null : resolved.activeOrgId + organizationSlug = scopedOrganizationId ? scopedOrgs[0]?.slug ?? 
null : resolved.activeOrgSlug + + if (shouldHydrateSessionActiveOrganization({ + scopedOrganizationId: apiKeyScopedOrganizationId, + sessionActiveOrganizationId: session?.activeOrganizationId, + resolvedActiveOrganizationId: organizationId, + })) { + await hydrateSessionActiveOrganization(session, organizationId) + if (session) { + c.set("session", { ...session, activeOrganizationId: organizationId }) + } + } + + c.set("userOrganizations", scopedOrgs) + c.set("activeOrganizationId", organizationId) + c.set("activeOrganizationSlug", organizationSlug) + } + + if (!organizationId) { + return c.json({ error: "organization_not_found" }, 404) as never + } + + const normalizedOrganizationId = normalizeDenTypeId("organization", organizationId) + + const context = await getOrganizationContextForUser({ + userId: normalizeDenTypeId("user", user.id), + organizationId: normalizedOrganizationId, + }) + + if (!context) { + return c.json({ error: "organization_not_found" }, 404) as never + } + + if (apiKey && !isScopedApiKeyForOrganization({ apiKey, organizationId: normalizedOrganizationId })) { + return c.json({ + error: "forbidden", + message: "This API key is scoped to a different organization.", + }, 403) as never + } + + if (apiKey?.metadata?.orgMembershipId && apiKey.metadata.orgMembershipId !== context.currentMember.id) { + return c.json({ + error: "forbidden", + message: "This API key is no longer valid for the current organization member.", + }, 403) as never + } + + c.set("organizationContext", context) + c.set("activeOrganizationId", context.organization.id) + c.set("activeOrganizationSlug", context.organization.slug) + await next() +} diff --git a/ee/apps/den-api/src/middleware/user-organizations.ts b/ee/apps/den-api/src/middleware/user-organizations.ts new file mode 100644 index 0000000000..a539d15039 --- /dev/null +++ b/ee/apps/den-api/src/middleware/user-organizations.ts @@ -0,0 +1,94 @@ +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { 
MiddlewareHandler } from "hono" +import { getApiKeyScopedOrganizationId } from "../api-keys.js" +import { resolveUserOrganizations, setSessionActiveOrganization, type UserOrgSummary } from "../orgs.js" +import type { AuthContextVariables } from "../session.js" + +export const LEGACY_ORG_PROXY_HEADER = "x-openwork-legacy-org-id" + +export type UserOrganizationsContext = { + userOrganizations: UserOrgSummary[] + activeOrganizationId: string | null + activeOrganizationSlug: string | null +} + +type SessionLike = AuthContextVariables["session"] + +export function getLegacyProxyOrganizationId(headers: Headers) { + const rawOrganizationId = headers.get(LEGACY_ORG_PROXY_HEADER)?.trim() + if (!rawOrganizationId) { + return null + } + + try { + return normalizeDenTypeId("organization", rawOrganizationId) + } catch { + return null + } +} + +export function shouldHydrateSessionActiveOrganization(input: { + resolvedActiveOrganizationId: string | null + scopedOrganizationId: string | null + sessionActiveOrganizationId?: string | null +}) { + return !input.scopedOrganizationId && !input.sessionActiveOrganizationId && !!input.resolvedActiveOrganizationId +} + +export async function hydrateSessionActiveOrganization(session: SessionLike, organizationId: string | null) { + if (!session?.id || !organizationId || session.activeOrganizationId === organizationId) { + return + } + + try { + const sessionId = normalizeDenTypeId("session", session.id) + const normalizedOrganizationId = normalizeDenTypeId("organization", organizationId) + await setSessionActiveOrganization(sessionId, normalizedOrganizationId) + } catch { + return + } +} + +export const resolveUserOrganizationsMiddleware: MiddlewareHandler<{ + Variables: AuthContextVariables & Partial +}> = async (c, next) => { + const user = c.get("user") + if (!user?.id) { + return c.json({ error: "unauthorized" }, 401) as never + } + + const session = c.get("session") + const apiKey = c.get("apiKey") + const apiKeyScopedOrganizationId = 
getApiKeyScopedOrganizationId(apiKey) + const legacyProxyOrganizationId = getLegacyProxyOrganizationId(c.req.raw.headers) + const scopedOrganizationId = apiKeyScopedOrganizationId ?? legacyProxyOrganizationId + const resolved = await resolveUserOrganizations({ + activeOrganizationId: scopedOrganizationId ?? session?.activeOrganizationId ?? null, + userId: normalizeDenTypeId("user", user.id), + }) + + const scopedOrgs = scopedOrganizationId + ? resolved.orgs.filter((org) => org.id === scopedOrganizationId) + : resolved.orgs + + const activeOrganizationId = scopedOrganizationId ? scopedOrgs[0]?.id ?? null : resolved.activeOrgId + const activeOrganizationSlug = scopedOrganizationId + ? scopedOrgs[0]?.slug ?? null + : resolved.activeOrgSlug + + if (shouldHydrateSessionActiveOrganization({ + scopedOrganizationId: apiKeyScopedOrganizationId, + sessionActiveOrganizationId: session?.activeOrganizationId, + resolvedActiveOrganizationId: activeOrganizationId, + })) { + await hydrateSessionActiveOrganization(session, activeOrganizationId) + if (session) { + c.set("session", { ...session, activeOrganizationId }) + } + } + + c.set("userOrganizations", scopedOrgs) + c.set("activeOrganizationId", activeOrganizationId) + c.set("activeOrganizationSlug", activeOrganizationSlug) + await next() +} diff --git a/ee/apps/den-api/src/middleware/validation.ts b/ee/apps/den-api/src/middleware/validation.ts new file mode 100644 index 0000000000..4c3e531196 --- /dev/null +++ b/ee/apps/den-api/src/middleware/validation.ts @@ -0,0 +1,36 @@ +import { validator as zValidator } from "hono-openapi" +import type { ZodSchema } from "zod" + +function invalidRequestResponse(result: { success: false; error: unknown }, c: { json: (body: unknown, status?: number) => Response }) { + return c.json( + { + error: "invalid_request", + details: result.error, + }, + 400, + ) +} + +export function jsonValidator(schema: T) { + return zValidator("json", schema, (result, c) => { + if (!result.success) { + return 
invalidRequestResponse(result, c) + } + }) +} + +export function queryValidator(schema: T) { + return zValidator("query", schema, (result, c) => { + if (!result.success) { + return invalidRequestResponse(result, c) + } + }) +} + +export function paramValidator(schema: T) { + return zValidator("param", schema, (result, c) => { + if (!result.success) { + return invalidRequestResponse(result, c) + } + }) +} diff --git a/ee/apps/den-api/src/openapi.ts b/ee/apps/den-api/src/openapi.ts new file mode 100644 index 0000000000..6274378838 --- /dev/null +++ b/ee/apps/den-api/src/openapi.ts @@ -0,0 +1,114 @@ +import { type DenTypeIdName, typeId } from "@openwork-ee/utils/typeid" +import { resolver } from "hono-openapi" +import { z } from "zod" + +const TYPE_ID_EXAMPLE_SUFFIX = "01h2xcejqtf2nbrexx3vqjhp41" + +function toPascalCase(value: string) { + return value + .replace(/[^a-zA-Z0-9]+/g, " ") + .trim() + .split(/\s+/) + .filter(Boolean) + .map((part) => part.charAt(0).toUpperCase() + part.slice(1)) + .join("") +} + +export function buildOperationId(method: string, path: string) { + const parts = path + .split("/") + .filter(Boolean) + .filter((part) => part !== "v1") + .map((part) => { + if (part.startsWith(":")) { + return `by-${part.slice(1)}` + } + + if (part === "*") { + return "wildcard" + } + + return part + }) + + return [method.toLowerCase(), ...parts] + .map(toPascalCase) + .join("") + .replace(/^[A-Z]/, (char) => char.toLowerCase()) +} + +export function denTypeIdSchema(typeName: TName) { + const prefix = typeId.prefix[typeName] + return typeId.schema(typeName).describe(`Den TypeID with '${prefix}_' prefix.`).meta({ + description: `Den TypeID with '${prefix}_' prefix and a ${typeId.suffixLength}-character base32 suffix.`, + examples: [`${prefix}_${TYPE_ID_EXAMPLE_SUFFIX}`], + format: "typeid", + }) +} + +const validationIssueSchema = z.object({ + message: z.string(), + path: z.array(z.union([z.string(), z.number()])).optional(), +}).passthrough() + +export const 
invalidRequestSchema = z.object({ + error: z.literal("invalid_request"), + details: z.array(validationIssueSchema), +}).meta({ ref: "InvalidRequestError" }) + +export const unauthorizedSchema = z.object({ + error: z.literal("unauthorized"), +}).meta({ ref: "UnauthorizedError" }) + +export const forbiddenSchema = z.object({ + error: z.literal("forbidden"), + message: z.string().optional(), +}).meta({ ref: "ForbiddenError" }) + +export const notFoundSchema = z.object({ + error: z.string(), + message: z.string().optional(), +}).meta({ ref: "NotFoundError" }) + +export const successSchema = z.object({ + success: z.literal(true), +}).meta({ ref: "SuccessResponse" }) + +export const emptyObjectSchema = z.object({}).passthrough().meta({ ref: "OpaqueObject" }) + +export function jsonResponse(description: string, schema: z.ZodTypeAny) { + return { + description, + content: { + "application/json": { + schema: resolver(schema), + }, + }, + } +} + +export function htmlResponse(description: string) { + return { + description, + content: { + "text/html": { + schema: resolver(z.string()), + }, + }, + } +} + +export function textResponse(description: string) { + return { + description, + content: { + "text/plain": { + schema: resolver(z.string()), + }, + }, + } +} + +export function emptyResponse(description: string) { + return { description } +} diff --git a/ee/apps/den-api/src/organization-access.ts b/ee/apps/den-api/src/organization-access.ts new file mode 100644 index 0000000000..6f2d7a1b2e --- /dev/null +++ b/ee/apps/den-api/src/organization-access.ts @@ -0,0 +1,15 @@ +import { createAccessControl } from "better-auth/plugins/access" +import { defaultRoles, defaultStatements } from "better-auth/plugins/organization/access" + +export const denOrganizationAccess = createAccessControl(defaultStatements) + +export const denOrganizationStaticRoles = { + owner: defaultRoles.owner, + admin: defaultRoles.admin, + member: defaultRoles.member, +} as const + +export const 
denDefaultDynamicOrganizationRoles = { + admin: defaultRoles.admin.statements, + member: defaultRoles.member.statements, +} as const diff --git a/ee/apps/den-api/src/organization-limits.ts b/ee/apps/den-api/src/organization-limits.ts new file mode 100644 index 0000000000..71728c5416 --- /dev/null +++ b/ee/apps/den-api/src/organization-limits.ts @@ -0,0 +1,196 @@ +import { and, eq, gt, sql } from "@openwork-ee/den-db/drizzle" +import { InvitationTable, MemberTable, OrganizationTable, WorkerTable } from "@openwork-ee/den-db/schema" +import { db } from "./db.js" + +export const DEFAULT_ORGANIZATION_LIMITS = { + members: 5, + workers: 1, +} as const + +export type OrganizationLimitType = keyof typeof DEFAULT_ORGANIZATION_LIMITS + +export type OrganizationLimits = { + members: number + workers: number +} + +type OrganizationId = typeof OrganizationTable.$inferSelect.id + +export type OrganizationMetadata = { + limits: OrganizationLimits + allowedDesktopVersions?: string[] +} & Record + +type OrganizationMetadataInput = Record | string | null | undefined + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +function normalizePositiveInteger(value: unknown, fallback: number) { + if (typeof value === "number" && Number.isInteger(value) && value > 0) { + return value + } + + if (typeof value === "string") { + const parsed = Number(value) + if (Number.isInteger(parsed) && parsed > 0) { + return parsed + } + } + + return fallback +} + +function normalizeDesktopVersionString(value: unknown) { + if (typeof value !== "string") { + return null + } + + const normalized = value.trim().replace(/^v/i, "") + return /^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?$/.test(normalized) + ? 
normalized + : null +} + +function normalizeAllowedDesktopVersions(value: unknown): string[] | null { + if (!Array.isArray(value)) { + return null + } + + const versions = [...new Set(value.map((entry) => normalizeDesktopVersionString(entry)).filter((entry): entry is string => Boolean(entry)))] + return versions +} + +function sameStringArray(left: string[] | null, right: string[] | null) { + if (left === right) { + return true + } + + if (!left || !right || left.length !== right.length) { + return false + } + + return left.every((entry, index) => right[index] === entry) +} + +function parseMetadata(input: OrganizationMetadataInput): Record { + if (!input) { + return {} + } + + if (typeof input === "string") { + try { + const parsed = JSON.parse(input) as unknown + return isRecord(parsed) ? parsed : {} + } catch { + return {} + } + } + + return isRecord(input) ? input : {} +} + +export function normalizeOrganizationMetadata(input: OrganizationMetadataInput): { + metadata: OrganizationMetadata + changed: boolean +} { + const parsed = parseMetadata(input) + const rawLimits = isRecord(parsed.limits) ? parsed.limits : null + const allowedDesktopVersions = normalizeAllowedDesktopVersions(parsed.allowedDesktopVersions) + const members = normalizePositiveInteger(rawLimits?.members, DEFAULT_ORGANIZATION_LIMITS.members) + const workers = normalizePositiveInteger(rawLimits?.workers ?? rawLimits?.Workers, DEFAULT_ORGANIZATION_LIMITS.workers) + + const metadata: OrganizationMetadata = { + ...parsed, + limits: { + members, + workers, + }, + ...(allowedDesktopVersions !== null ? { allowedDesktopVersions } : {}), + } as OrganizationMetadata + + if (allowedDesktopVersions === null) { + delete metadata.allowedDesktopVersions + } + + const rawAllowedDesktopVersions = Array.isArray(parsed.allowedDesktopVersions) + ? 
parsed.allowedDesktopVersions.filter((entry): entry is string => typeof entry === "string") + : null + + const changed = + !isRecord(parsed.limits) || + Object.keys(parsed).length === 0 || + rawLimits?.members !== members || + (rawLimits?.workers ?? rawLimits?.Workers) !== workers || + !sameStringArray(rawAllowedDesktopVersions, allowedDesktopVersions) + + return { metadata, changed } +} + +export function serializeOrganizationMetadata(metadata: OrganizationMetadataInput) { + const parsed = parseMetadata(metadata) + return Object.keys(parsed).length > 0 ? JSON.stringify(parsed) : null +} + +export async function getOrInitializeOrganizationMetadata(organizationId: OrganizationId) { + const rows = await db + .select({ metadata: OrganizationTable.metadata }) + .from(OrganizationTable) + .where(eq(OrganizationTable.id, organizationId)) + .limit(1) + + const { metadata, changed } = normalizeOrganizationMetadata(rows[0]?.metadata) + if (changed) { + await db + .update(OrganizationTable) + .set({ metadata }) + .where(eq(OrganizationTable.id, organizationId)) + } + + return metadata +} + +async function countOrganizationMembers(organizationId: OrganizationId) { + const rows = await db + .select({ count: sql`count(*)` }) + .from(MemberTable) + .where(eq(MemberTable.organizationId, organizationId)) + + return Number(rows[0]?.count ?? 0) +} + +async function countPendingOrganizationInvitations(organizationId: OrganizationId) { + const rows = await db + .select({ count: sql`count(*)` }) + .from(InvitationTable) + .where(and(eq(InvitationTable.organizationId, organizationId), eq(InvitationTable.status, "pending"), gt(InvitationTable.expiresAt, new Date()))) + + return Number(rows[0]?.count ?? 0) +} + +async function countOrganizationWorkers(organizationId: OrganizationId) { + const rows = await db + .select({ count: sql`count(*)` }) + .from(WorkerTable) + .where(eq(WorkerTable.org_id, organizationId)) + + return Number(rows[0]?.count ?? 
0) +} + +export async function getOrganizationLimitStatus(organizationId: OrganizationId, limitType: OrganizationLimitType) { + const metadata = await getOrInitializeOrganizationMetadata(organizationId) + const currentCount = + limitType === "members" + ? (await countOrganizationMembers(organizationId)) + (await countPendingOrganizationInvitations(organizationId)) + : await countOrganizationWorkers(organizationId) + + const limit = metadata.limits[limitType] + + return { + metadata, + currentCount, + limit, + exceeded: currentCount >= limit, + } +} diff --git a/ee/apps/den-api/src/orgs.ts b/ee/apps/den-api/src/orgs.ts new file mode 100644 index 0000000000..179622021a --- /dev/null +++ b/ee/apps/den-api/src/orgs.ts @@ -0,0 +1,959 @@ +import { and, asc, eq, inArray } from "@openwork-ee/den-db/drizzle" +import { + AuthSessionTable, + AuthUserTable, + InvitationTable, + MemberTable, + OrganizationRoleTable, + OrganizationTable, + TeamMemberTable, + TeamTable, +} from "@openwork-ee/den-db/schema" +import { normalizeDesktopAppRestrictions, type DesktopAppRestrictions } from "@openwork/types/den/desktop-app-restrictions" +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import { db } from "./db.js" +import { DEFAULT_ORGANIZATION_LIMITS, normalizeOrganizationMetadata, serializeOrganizationMetadata } from "./organization-limits.js" +import { denDefaultDynamicOrganizationRoles, denOrganizationStaticRoles } from "./organization-access.js" + +type UserId = typeof AuthUserTable.$inferSelect.id +type SessionId = typeof AuthSessionTable.$inferSelect.id +type OrgId = typeof OrganizationTable.$inferSelect.id +type MemberRow = typeof MemberTable.$inferSelect +type MemberId = MemberRow["id"] +type InvitationRow = typeof InvitationTable.$inferSelect +export type AllowedEmailDomains = string[] | null + +export type InvitationStatus = "pending" | "accepted" | "canceled" | "expired" + +export type InvitationPreview = { + invitation: { + id: string + email: 
string + role: string + status: InvitationStatus + expiresAt: Date + createdAt: Date + } + organization: { + id: OrgId + name: string + slug: string + allowedEmailDomains: AllowedEmailDomains + } +} + +export type UserOrgSummary = { + id: OrgId + name: string + slug: string + logo: string | null + metadata: string | null + role: string + orgMemberId: string + membershipId: string + createdAt: Date + updatedAt: Date +} + +export type OrganizationContext = { + organization: { + id: OrgId + name: string + slug: string + logo: string | null + allowedEmailDomains: AllowedEmailDomains + desktopAppRestrictions: DesktopAppRestrictions + metadata: string | null + createdAt: Date + updatedAt: Date + } + currentMember: { + id: MemberId + userId: UserId + role: string + createdAt: Date + isOwner: boolean + } + members: Array<{ + id: MemberId + userId: UserId + role: string + createdAt: Date + isOwner: boolean + user: { + id: UserId + email: string + name: string + image: string | null + } + }> + invitations: Array<{ + id: string + email: string + role: string + status: string + expiresAt: Date + createdAt: Date + }> + roles: Array<{ + id: string + role: string + permission: Record + builtIn: boolean + protected: boolean + createdAt: Date | null + updatedAt: Date | null + }> + teams: Array<{ + id: typeof TeamTable.$inferSelect.id + name: string + createdAt: Date + updatedAt: Date + memberIds: MemberId[] + }> +} + +export type MemberTeamSummary = { + id: typeof TeamTable.$inferSelect.id + name: string + organizationId: typeof TeamTable.$inferSelect.organizationId + createdAt: Date + updatedAt: Date +} + +function splitRoles(value: string) { + return value + .split(",") + .map((entry) => entry.trim()) + .filter(Boolean) +} + +function hasRole(roleValue: string, roleName: string) { + return splitRoles(roleValue).includes(roleName) +} + +export function roleIncludesOwner(roleValue: string) { + return hasRole(roleValue, "owner") +} + +function titleCase(value: string) { + return 
value + .split(/\s+/) + .filter(Boolean) + .map((part) => `${part.slice(0, 1).toUpperCase()}${part.slice(1)}`) + .join(" ") +} + +function buildPersonalOrgName(input: { + name?: string | null + email?: string | null +}) { + const normalizedName = input.name?.trim() + if (normalizedName) { + return `${normalizedName}'s Org` + } + + const localPart = input.email?.split("@")[0] ?? "Personal" + const normalized = titleCase(localPart.replace(/[._-]+/g, " ").trim()) || "Personal" + const suffix = normalized.endsWith("s") ? "' Org" : "'s Org" + return `${normalized}${suffix}` +} + +function normalizeEmailDomainValue(value: string) { + const normalized = value.trim().toLowerCase().replace(/^@+/, "") + if (!normalized) { + return null + } + + if (!/^[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)+$/.test(normalized)) { + return null + } + + return normalized +} + +export function normalizeAllowedEmailDomains(input: readonly string[] | null | undefined): { + domains: AllowedEmailDomains + invalidDomains: string[] +} { + if (!input || input.length === 0) { + return { + domains: null, + invalidDomains: [], + } + } + + const normalized = new Set() + const invalidDomains: string[] = [] + + for (const value of input) { + const nextDomain = normalizeEmailDomainValue(value) + if (!nextDomain) { + invalidDomains.push(value) + continue + } + normalized.add(nextDomain) + } + + return { + domains: normalized.size > 0 ? 
[...normalized].sort() : null, + invalidDomains, + } +} + +function getEmailDomain(email: string) { + const normalized = email.trim().toLowerCase() + const atIndex = normalized.lastIndexOf("@") + if (atIndex === -1 || atIndex + 1 >= normalized.length) { + return null + } + return normalized.slice(atIndex + 1) +} + +export function isEmailAllowedForOrganization(allowedEmailDomains: readonly string[] | null | undefined, email: string) { + if (!allowedEmailDomains || allowedEmailDomains.length === 0) { + return true + } + + const emailDomain = getEmailDomain(email) + if (!emailDomain) { + return false + } + + return allowedEmailDomains.includes(emailDomain) +} + +function normalizeStoredAllowedEmailDomains(value: unknown): AllowedEmailDomains { + const values = Array.isArray(value) ? value.filter((entry): entry is string => typeof entry === "string") : null + return normalizeAllowedEmailDomains(values).domains +} + +export function parsePermissionRecord(value: string | null) { + if (!value) { + return {} + } + + try { + const parsed = JSON.parse(value) as Record + return Object.fromEntries( + Object.entries(parsed) + .filter((entry): entry is [string, unknown[]] => Array.isArray(entry[1])) + .map(([resource, actions]) => [ + resource, + actions.filter((entry: unknown): entry is string => typeof entry === "string"), + ]), + ) + } catch { + return {} + } +} + +export function serializePermissionRecord(value: Record) { + return JSON.stringify(value) +} + +export class OrganizationEmailDomainRestrictionError extends Error { + readonly emailDomain: string | null + readonly allowedEmailDomains: string[] + + constructor(email: string, allowedEmailDomains: string[]) { + const emailDomain = getEmailDomain(email) + super( + allowedEmailDomains.length === 1 + ? 
`This workspace only allows ${allowedEmailDomains[0]} email addresses.` + : `This workspace only allows email addresses from these domains: ${allowedEmailDomains.join(", ")}.`, + ) + this.name = "OrganizationEmailDomainRestrictionError" + this.emailDomain = emailDomain + this.allowedEmailDomains = allowedEmailDomains + } +} + +function clonePermissionRecord(value: Record) { + return Object.fromEntries( + Object.entries(value).map(([resource, actions]) => [resource, [...actions]]), + ) as Record +} + +async function listMembershipRows(userId: UserId) { + return db + .select() + .from(MemberTable) + .where(eq(MemberTable.userId, userId)) + .orderBy(asc(MemberTable.createdAt)) +} + +function getInvitationStatus(invitation: Pick): InvitationStatus { + if (invitation.status !== "pending") { + return invitation.status as Exclude + } + + return invitation.expiresAt > new Date() ? "pending" : "expired" +} + +async function getInvitationById(invitationIdRaw: string) { + let invitationId + try { + invitationId = normalizeDenTypeId("invitation", invitationIdRaw) + } catch { + return null + } + + const rows = await db + .select() + .from(InvitationTable) + .where(eq(InvitationTable.id, invitationId)) + .limit(1) + + return rows[0] ?? 
null +} + +async function ensureDefaultDynamicRoles(orgId: OrgId) { + for (const [role, permission] of Object.entries(denDefaultDynamicOrganizationRoles)) { + const serializedPermission = serializePermissionRecord(clonePermissionRecord(permission)) + await db + .insert(OrganizationRoleTable) + .values({ + id: createDenTypeId("organizationRole"), + organizationId: orgId, + role, + permission: serializedPermission, + }) + .onDuplicateKeyUpdate({ + set: { + permission: serializedPermission, + }, + }) + } +} + +function normalizeAssignableRole(input: string, availableRoles: Set) { + const roles = splitRoles(input).filter((role) => availableRoles.has(role)) + if (roles.length === 0) { + return "member" + } + return roles.join(",") +} + +export async function listAssignableRoles(orgId: OrgId) { + await ensureDefaultDynamicRoles(orgId) + + const rows = await db + .select({ role: OrganizationRoleTable.role }) + .from(OrganizationRoleTable) + .where(eq(OrganizationRoleTable.organizationId, orgId)) + + return new Set(rows.map((row) => row.role)) +} + +async function insertMemberIfMissing(input: { + organizationId: OrgId + userId: UserId + role: string +}) { + const existing = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.organizationId, input.organizationId), eq(MemberTable.userId, input.userId))) + .limit(1) + + if (existing.length > 0) { + return existing[0] + } + + await db.insert(MemberTable).values({ + id: createDenTypeId("member"), + organizationId: input.organizationId, + userId: input.userId, + role: input.role, + }) + + const created = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.organizationId, input.organizationId), eq(MemberTable.userId, input.userId))) + .limit(1) + + if (!created[0]) { + throw new Error("failed_to_create_member") + } + + return created[0] +} + +async function acceptInvitation(invitation: InvitationRow, userId: UserId) { + const availableRoles = await listAssignableRoles(invitation.organizationId) 
+ const role = normalizeAssignableRole(invitation.role, availableRoles) + + const member = await insertMemberIfMissing({ + organizationId: invitation.organizationId, + userId, + role, + }) + + if (invitation.teamId) { + const teams = await db + .select({ id: TeamTable.id }) + .from(TeamTable) + .where(eq(TeamTable.id, invitation.teamId)) + .limit(1) + + if (teams[0]) { + const existingTeamMember = await db + .select({ id: TeamMemberTable.id }) + .from(TeamMemberTable) + .where(and(eq(TeamMemberTable.teamId, invitation.teamId), eq(TeamMemberTable.orgMembershipId, member.id))) + .limit(1) + + if (!existingTeamMember[0]) { + await db.insert(TeamMemberTable).values({ + id: createDenTypeId("teamMember"), + teamId: invitation.teamId, + orgMembershipId: member.id, + }) + } + } + } + + await db + .update(InvitationTable) + .set({ status: "accepted" }) + .where(eq(InvitationTable.id, invitation.id)) + + return member +} + +export async function acceptInvitationForUser(input: { + userId: UserId + email: string + invitationId: string | null +}) { + if (!input.invitationId) { + return null + } + + const invitation = await getInvitationById(input.invitationId) + + if (!invitation) { + return null + } + + if (invitation.email.trim().toLowerCase() !== input.email.trim().toLowerCase()) { + return null + } + + if (getInvitationStatus(invitation) !== "pending") { + return null + } + + const organizationRows = await db + .select({ allowedEmailDomains: OrganizationTable.allowedEmailDomains }) + .from(OrganizationTable) + .where(eq(OrganizationTable.id, invitation.organizationId)) + .limit(1) + + const allowedEmailDomains = normalizeStoredAllowedEmailDomains(organizationRows[0]?.allowedEmailDomains) + if (!isEmailAllowedForOrganization(allowedEmailDomains, input.email)) { + throw new OrganizationEmailDomainRestrictionError(input.email, allowedEmailDomains ?? 
[]) + } + + const member = await acceptInvitation(invitation, input.userId) + return { + invitation, + member, + } +} + +export async function getInvitationPreview(invitationIdRaw: string): Promise { + let invitationId + try { + invitationId = normalizeDenTypeId("invitation", invitationIdRaw) + } catch { + return null + } + + const rows = await db + .select({ + invitation: { + id: InvitationTable.id, + email: InvitationTable.email, + role: InvitationTable.role, + status: InvitationTable.status, + expiresAt: InvitationTable.expiresAt, + createdAt: InvitationTable.createdAt, + }, + organization: { + id: OrganizationTable.id, + name: OrganizationTable.name, + slug: OrganizationTable.slug, + allowedEmailDomains: OrganizationTable.allowedEmailDomains, + }, + }) + .from(InvitationTable) + .innerJoin(OrganizationTable, eq(InvitationTable.organizationId, OrganizationTable.id)) + .where(eq(InvitationTable.id, invitationId)) + .limit(1) + + const row = rows[0] + if (!row) { + return null + } + + return { + invitation: { + ...row.invitation, + status: getInvitationStatus(row.invitation), + }, + organization: { + ...row.organization, + allowedEmailDomains: normalizeStoredAllowedEmailDomains(row.organization.allowedEmailDomains), + }, + } +} + +async function createOrganizationRecord(input: { + userId: UserId + name: string + logo?: string | null + metadata?: Record | null +}) { + const organizationId = createDenTypeId("organization") + const metadata = + input.metadata ?? { + limits: { + members: DEFAULT_ORGANIZATION_LIMITS.members, + workers: DEFAULT_ORGANIZATION_LIMITS.workers, + }, + } + + await db.insert(OrganizationTable).values({ + id: organizationId, + name: input.name, + slug: organizationId, + logo: input.logo ?? 
null, + metadata, + }) + + await db.insert(MemberTable).values({ + id: createDenTypeId("member"), + organizationId, + userId: input.userId, + role: "owner", + }) + + await ensureDefaultDynamicRoles(organizationId) + + return organizationId +} + +export async function ensureUserOrgAccess(input: { + userId: UserId +}) { + const memberships = await listMembershipRows(input.userId) + if (memberships.length > 0) { + const organizationIds = [...new Set(memberships.map((membership) => membership.organizationId))] + await Promise.all(organizationIds.map((organizationId) => ensureDefaultDynamicRoles(organizationId))) + return memberships[0].organizationId + } + + return null +} + +export async function ensurePersonalOrganizationForUser(userId: UserId) { + const existingOrgId = await ensureUserOrgAccess({ userId }) + if (existingOrgId) { + return existingOrgId + } + + const userRows = await db + .select({ + name: AuthUserTable.name, + email: AuthUserTable.email, + }) + .from(AuthUserTable) + .where(eq(AuthUserTable.id, userId)) + .limit(1) + + const user = userRows[0] + const organizationId = await createOrganizationRecord({ + userId, + name: buildPersonalOrgName({ + name: user?.name, + email: user?.email, + }), + }) + + return organizationId +} + +export async function createOrganizationForUser(input: { + userId: UserId + name: string +}) { + return createOrganizationRecord({ + userId: input.userId, + name: input.name.trim(), + }) +} + +export async function updateOrganizationName(input: { + organizationId: OrgId + name: string +}) { + return updateOrganizationSettings({ + organizationId: input.organizationId, + name: input.name, + }) +} + +export async function updateOrganizationSettings(input: { + organizationId: OrgId + name?: string + allowedEmailDomains?: readonly string[] | null + desktopAppRestrictions?: DesktopAppRestrictions + allowedDesktopVersions?: readonly string[] | null +}) { + const nextName = typeof input.name === "string" ? 
input.name.trim() : null + if (typeof input.name === "string" && !nextName) { + return null + } + + const updates: Partial = {} + if (nextName) { + updates.name = nextName + } + if (input.allowedEmailDomains !== undefined) { + updates.allowedEmailDomains = normalizeAllowedEmailDomains(input.allowedEmailDomains).domains + } + if (input.desktopAppRestrictions !== undefined) { + updates.desktopAppRestrictions = normalizeDesktopAppRestrictions(input.desktopAppRestrictions) + } + if (input.allowedDesktopVersions !== undefined) { + const rows = await db + .select({ metadata: OrganizationTable.metadata }) + .from(OrganizationTable) + .where(eq(OrganizationTable.id, input.organizationId)) + .limit(1) + + const existingOrganization = rows[0] + if (!existingOrganization) { + return null + } + + const nextMetadata = { + ...normalizeOrganizationMetadata(existingOrganization.metadata).metadata, + } as Record + + if (input.allowedDesktopVersions === null) { + delete nextMetadata.allowedDesktopVersions + } else { + nextMetadata.allowedDesktopVersions = input.allowedDesktopVersions + } + + updates.metadata = normalizeOrganizationMetadata(nextMetadata).metadata + } + + if (Object.keys(updates).length === 0) { + return null + } + + await db + .update(OrganizationTable) + .set(updates) + .where(eq(OrganizationTable.id, input.organizationId)) + + const rows = await db + .select() + .from(OrganizationTable) + .where(eq(OrganizationTable.id, input.organizationId)) + .limit(1) + + return rows[0] ?? 
null +} + +export async function seedDefaultOrganizationRoles(orgId: OrgId) { + await ensureDefaultDynamicRoles(orgId) +} + +export async function setSessionActiveOrganization(sessionId: SessionId, organizationId: OrgId | null) { + await db + .update(AuthSessionTable) + .set({ activeOrganizationId: organizationId }) + .where(eq(AuthSessionTable.id, sessionId)) +} + +export async function listUserOrgs(userId: UserId) { + const memberships = await db + .select({ + membershipId: MemberTable.id, + role: MemberTable.role, + organization: { + id: OrganizationTable.id, + name: OrganizationTable.name, + slug: OrganizationTable.slug, + logo: OrganizationTable.logo, + allowedEmailDomains: OrganizationTable.allowedEmailDomains, + desktopAppRestrictions: OrganizationTable.desktopAppRestrictions, + metadata: OrganizationTable.metadata, + createdAt: OrganizationTable.createdAt, + updatedAt: OrganizationTable.updatedAt, + }, + }) + .from(MemberTable) + .innerJoin(OrganizationTable, eq(MemberTable.organizationId, OrganizationTable.id)) + .where(eq(MemberTable.userId, userId)) + .orderBy(asc(MemberTable.createdAt)) + + return memberships.map((row) => ({ + id: row.organization.id, + name: row.organization.name, + slug: row.organization.slug, + logo: row.organization.logo, + allowedEmailDomains: normalizeStoredAllowedEmailDomains(row.organization.allowedEmailDomains), + desktopAppRestrictions: normalizeDesktopAppRestrictions(row.organization.desktopAppRestrictions), + metadata: serializeOrganizationMetadata(row.organization.metadata), + role: row.role, + orgMemberId: row.membershipId, + membershipId: row.membershipId, + createdAt: row.organization.createdAt, + updatedAt: row.organization.updatedAt, + })) satisfies UserOrgSummary[] +} + +export async function resolveUserOrganizations(input: { + activeOrganizationId?: string | null + userId: UserId +}) { + await ensureUserOrgAccess({ userId: input.userId }) + + const orgs = await listUserOrgs(input.userId) + + const availableOrgIds = 
new Set(orgs.map((org) => org.id)) + + let activeOrgId: OrgId | null = null + if (input.activeOrganizationId) { + try { + const normalized = normalizeDenTypeId("organization", input.activeOrganizationId) + if (availableOrgIds.has(normalized)) { + activeOrgId = normalized + } + } catch { + activeOrgId = null + } + } + + activeOrgId ??= orgs[0]?.id ?? null + + const activeOrg = orgs.find((org) => org.id === activeOrgId) ?? null + + return { + orgs, + activeOrgId, + activeOrgSlug: activeOrg?.slug ?? null, + } +} + +export async function getOrganizationContextForUser(input: { + userId: UserId + organizationId: OrgId +}) { + const organizationRows = await db + .select() + .from(OrganizationTable) + .where(eq(OrganizationTable.id, input.organizationId)) + .limit(1) + + const organization = organizationRows[0] + if (!organization) { + return null + } + + const currentMemberRows = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.organizationId, organization.id), eq(MemberTable.userId, input.userId))) + .limit(1) + + const currentMember = currentMemberRows[0] + if (!currentMember) { + return null + } + + await ensureDefaultDynamicRoles(organization.id) + + const members = await db + .select({ + id: MemberTable.id, + userId: MemberTable.userId, + role: MemberTable.role, + createdAt: MemberTable.createdAt, + user: { + id: AuthUserTable.id, + email: AuthUserTable.email, + name: AuthUserTable.name, + image: AuthUserTable.image, + }, + }) + .from(MemberTable) + .innerJoin(AuthUserTable, eq(MemberTable.userId, AuthUserTable.id)) + .where(eq(MemberTable.organizationId, organization.id)) + .orderBy(asc(MemberTable.createdAt)) + + const invitations = await db + .select({ + id: InvitationTable.id, + email: InvitationTable.email, + role: InvitationTable.role, + status: InvitationTable.status, + expiresAt: InvitationTable.expiresAt, + createdAt: InvitationTable.createdAt, + }) + .from(InvitationTable) + .where(eq(InvitationTable.organizationId, organization.id)) + 
.orderBy(asc(InvitationTable.createdAt)) + + const dynamicRoles = await db + .select() + .from(OrganizationRoleTable) + .where(eq(OrganizationRoleTable.organizationId, organization.id)) + .orderBy(asc(OrganizationRoleTable.createdAt)) + + const teams = await listOrganizationTeams(organization.id) + + const builtInDynamicRoleNames = new Set(Object.keys(denDefaultDynamicOrganizationRoles)) + + return { + organization: { + id: organization.id, + name: organization.name, + slug: organization.slug, + logo: organization.logo, + allowedEmailDomains: normalizeStoredAllowedEmailDomains(organization.allowedEmailDomains), + desktopAppRestrictions: normalizeDesktopAppRestrictions(organization.desktopAppRestrictions), + metadata: serializeOrganizationMetadata(organization.metadata), + createdAt: organization.createdAt, + updatedAt: organization.updatedAt, + }, + currentMember: { + id: currentMember.id, + userId: currentMember.userId, + role: currentMember.role, + createdAt: currentMember.createdAt, + isOwner: roleIncludesOwner(currentMember.role), + }, + members: members.map((member) => ({ + ...member, + isOwner: roleIncludesOwner(member.role), + })), + invitations, + roles: [ + { + id: "builtin-owner", + role: "owner", + permission: clonePermissionRecord(denOrganizationStaticRoles.owner.statements), + builtIn: true, + protected: true, + createdAt: null, + updatedAt: null, + }, + ...dynamicRoles.map((role) => ({ + id: role.id, + role: role.role, + permission: parsePermissionRecord(role.permission), + builtIn: builtInDynamicRoleNames.has(role.role), + protected: false, + createdAt: role.createdAt, + updatedAt: role.updatedAt, + })), + ], + teams, + } satisfies OrganizationContext +} + +async function listOrganizationTeams(organizationId: OrgId) { + const teams = await db + .select({ + id: TeamTable.id, + name: TeamTable.name, + createdAt: TeamTable.createdAt, + updatedAt: TeamTable.updatedAt, + }) + .from(TeamTable) + .where(eq(TeamTable.organizationId, organizationId)) + 
.orderBy(asc(TeamTable.createdAt)) + + if (teams.length === 0) { + return [] + } + + const memberships = await db + .select({ + teamId: TeamMemberTable.teamId, + orgMembershipId: TeamMemberTable.orgMembershipId, + }) + .from(TeamMemberTable) + .where(inArray(TeamMemberTable.teamId, teams.map((team) => team.id))) + + const memberIdsByTeamId = new Map() + for (const membership of memberships) { + const existing = memberIdsByTeamId.get(membership.teamId) ?? [] + existing.push(membership.orgMembershipId) + memberIdsByTeamId.set(membership.teamId, existing) + } + + return teams.map((team) => ({ + ...team, + memberIds: memberIdsByTeamId.get(team.id) ?? [], + })) +} + +export async function listTeamsForMember(input: { + organizationId: OrgId + memberId: MemberRow["id"] +}) { + return db + .select({ + id: TeamTable.id, + name: TeamTable.name, + organizationId: TeamTable.organizationId, + createdAt: TeamTable.createdAt, + updatedAt: TeamTable.updatedAt, + }) + .from(TeamMemberTable) + .innerJoin(TeamTable, eq(TeamMemberTable.teamId, TeamTable.id)) + .where(and(eq(TeamTable.organizationId, input.organizationId), eq(TeamMemberTable.orgMembershipId, input.memberId))) + .orderBy(asc(TeamTable.createdAt)) +} + +export async function removeOrganizationMember(input: { + organizationId: OrgId + memberId: MemberRow["id"] +}) { + const memberRows = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.id, input.memberId), eq(MemberTable.organizationId, input.organizationId))) + .limit(1) + + const member = memberRows[0] ?? 
null + if (!member) { + return null + } + + const teams = await db + .select({ id: TeamTable.id }) + .from(TeamTable) + .where(eq(TeamTable.organizationId, input.organizationId)) + + await db.transaction(async (tx) => { + for (const team of teams) { + await tx + .delete(TeamMemberTable) + .where(and(eq(TeamMemberTable.teamId, team.id), eq(TeamMemberTable.orgMembershipId, member.id))) + } + + await tx.delete(MemberTable).where(eq(MemberTable.id, member.id)) + }) + + return member +} diff --git a/ee/apps/den-api/src/routes/README.md b/ee/apps/den-api/src/routes/README.md new file mode 100644 index 0000000000..9976abedd9 --- /dev/null +++ b/ee/apps/den-api/src/routes/README.md @@ -0,0 +1,23 @@ +# Routes + +This folder groups Den API endpoints by product surface instead of keeping one large router file. + +## Layout + +- `auth/`: Better Auth mount and desktop handoff routes +- `me/`: current-user routes that describe the signed-in user and their org access +- `org/`: organization routes split into focused files by concern +- `admin/`: admin-only operational endpoints +- `version/`: public app version metadata for desktop update checks +- `workers/`: worker lifecycle, runtime, billing, and heartbeat routes + +## Conventions + +- Each route area exports a single `register...Routes()` function from its `index.ts` +- Request validation should use Hono Zod validators from `src/middleware/index.ts` +- Shared auth/org/team context should come from `src/middleware/index.ts`, not from ad hoc request parsing +- New route areas should get their own folder plus a local `README.md` + +## Why this exists + +Agents often need to change one endpoint family quickly. Keeping route areas isolated makes it easier to understand ownership and avoid accidental cross-surface regressions. 
diff --git a/ee/apps/den-api/src/routes/admin/README.md b/ee/apps/den-api/src/routes/admin/README.md new file mode 100644 index 0000000000..517fc5e538 --- /dev/null +++ b/ee/apps/den-api/src/routes/admin/README.md @@ -0,0 +1,21 @@ +# Admin Routes + +This folder owns admin-only Den API surfaces. + +## Files + +- `index.ts`: currently registers the admin overview endpoint + +## Current routes + +- `GET /v1/admin/overview` + +## Expectations + +- Gate all routes with `requireAdminMiddleware` +- Keep admin reporting logic here instead of mixing it into auth or org routes +- Prefer query validators for report flags such as `includeBilling` + +## Notes + +This area is intentionally small for now, but it is its own folder so future admin/reporting endpoints have a clear home. diff --git a/ee/apps/den-api/src/routes/admin/index.ts b/ee/apps/den-api/src/routes/admin/index.ts new file mode 100644 index 0000000000..186686a28c --- /dev/null +++ b/ee/apps/den-api/src/routes/admin/index.ts @@ -0,0 +1,322 @@ +import { asc, desc, eq, isNotNull, sql } from "@openwork-ee/den-db/drizzle" +import { AuthAccountTable, AuthSessionTable, AuthUserTable, WorkerTable, AdminAllowlistTable } from "@openwork-ee/den-db/schema" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { getCloudWorkerAdminBillingStatus } from "../../billing/polar.js" +import { db } from "../../db.js" +import { queryValidator, requireAdminMiddleware } from "../../middleware/index.js" +import { denTypeIdSchema, invalidRequestSchema, jsonResponse, unauthorizedSchema } from "../../openapi.js" +import type { AuthContextVariables } from "../../session.js" + +type UserId = typeof AuthUserTable.$inferSelect.id + +const overviewQuerySchema = z.object({ + includeBilling: z.string().optional(), +}) + +const adminOverviewResponseSchema = z.object({ + viewer: z.object({ + id: denTypeIdSchema("user"), + email: z.string(), + name: z.string().nullable(), + }), + admins: 
z.array(z.object({}).passthrough()), + summary: z.object({}).passthrough(), + users: z.array(z.object({}).passthrough()), + generatedAt: z.string().datetime(), +}).meta({ ref: "AdminOverviewResponse" }) + +function normalizeEmail(value: string | null | undefined) { + return value?.trim().toLowerCase() ?? "" +} + +function toNumber(value: unknown) { + if (typeof value === "number" && Number.isFinite(value)) { + return value + } + + const parsed = Number(value) + return Number.isFinite(parsed) ? parsed : 0 +} + +function isWithinDays(value: Date | string | null, days: number) { + if (!value) { + return false + } + + const date = value instanceof Date ? value : new Date(value) + if (Number.isNaN(date.getTime())) { + return false + } + + const windowMs = days * 24 * 60 * 60 * 1000 + return Date.now() - date.getTime() <= windowMs +} + +function normalizeProvider(providerId: string) { + const normalized = providerId.trim().toLowerCase() + if (!normalized) { + return "unknown" + } + + if (normalized === "credential" || normalized === "email-password") { + return "email" + } + + return normalized +} + +function parseBooleanQuery(value: string | undefined): boolean { + if (!value) { + return false + } + + const normalized = value.trim().toLowerCase() + return normalized === "1" || normalized === "true" || normalized === "yes" +} + +async function mapWithConcurrency(items: T[], limit: number, mapper: (item: T) => Promise) { + if (items.length === 0) { + return [] as R[] + } + + const results = new Array(items.length) + let nextIndex = 0 + + async function runWorker() { + while (nextIndex < items.length) { + const currentIndex = nextIndex + nextIndex += 1 + results[currentIndex] = await mapper(items[currentIndex]) + } + } + + const workerCount = Math.max(1, Math.min(limit, items.length)) + await Promise.all(Array.from({ length: workerCount }, () => runWorker())) + return results +} + +export function registerAdminRoutes(app: Hono) { + app.get( + "/v1/admin/overview", + 
describeRoute({ + tags: ["Admin"], + summary: "Get admin overview", + description: "Returns a high-level administrative overview of users, sessions, workers, admins, and optional billing data for Den operations.", + responses: { + 200: jsonResponse("Administrative overview returned successfully.", adminOverviewResponseSchema), + 400: jsonResponse("The admin overview query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be an authenticated admin.", unauthorizedSchema), + }, + }), + requireAdminMiddleware, + queryValidator(overviewQuerySchema), + async (c) => { + const user = c.get("user") + const query = c.req.valid("query") + const includeBilling = parseBooleanQuery(query.includeBilling) + + const [admins, users, workerStatsRows, sessionStatsRows, accountRows] = await Promise.all([ + db + .select({ + email: AdminAllowlistTable.email, + note: AdminAllowlistTable.note, + createdAt: AdminAllowlistTable.created_at, + }) + .from(AdminAllowlistTable) + .orderBy(asc(AdminAllowlistTable.email)), + db.select().from(AuthUserTable).orderBy(desc(AuthUserTable.createdAt)), + db + .select({ + userId: WorkerTable.created_by_user_id, + workerCount: sql`count(*)`, + cloudWorkerCount: sql`sum(case when ${WorkerTable.destination} = 'cloud' then 1 else 0 end)`, + localWorkerCount: sql`sum(case when ${WorkerTable.destination} = 'local' then 1 else 0 end)`, + latestWorkerCreatedAt: sql`max(${WorkerTable.created_at})`, + }) + .from(WorkerTable) + .where(isNotNull(WorkerTable.created_by_user_id)) + .groupBy(WorkerTable.created_by_user_id), + db + .select({ + userId: AuthSessionTable.userId, + sessionCount: sql`count(*)`, + lastSeenAt: sql`max(${AuthSessionTable.updatedAt})`, + }) + .from(AuthSessionTable) + .groupBy(AuthSessionTable.userId), + db + .select({ + userId: AuthAccountTable.userId, + providerId: AuthAccountTable.providerId, + }) + .from(AuthAccountTable), + ]) + + const workerStatsByUser = new Map() + + for (const row of workerStatsRows) { 
+ if (!row.userId) { + continue + } + + workerStatsByUser.set(row.userId, { + workerCount: toNumber(row.workerCount), + cloudWorkerCount: toNumber(row.cloudWorkerCount), + localWorkerCount: toNumber(row.localWorkerCount), + latestWorkerCreatedAt: row.latestWorkerCreatedAt, + }) + } + + const sessionStatsByUser = new Map() + + for (const row of sessionStatsRows) { + sessionStatsByUser.set(row.userId, { + sessionCount: toNumber(row.sessionCount), + lastSeenAt: row.lastSeenAt, + }) + } + + const providersByUser = new Map>() + for (const row of accountRows) { + const providerId = normalizeProvider(row.providerId) + const existing = providersByUser.get(row.userId) ?? new Set() + existing.add(providerId) + providersByUser.set(row.userId, existing) + } + + const defaultBilling = { + status: "unavailable" as const, + featureGateEnabled: false, + subscriptionId: null, + subscriptionStatus: null, + currentPeriodEnd: null, + source: "unavailable" as const, + note: "Billing lookup unavailable.", + } + + const billingRows = includeBilling + ? await mapWithConcurrency(users, 4, async (entry) => ({ + userId: entry.id, + billing: await getCloudWorkerAdminBillingStatus({ + userId: entry.id, + email: entry.email, + name: entry.name ?? entry.email, + }), + })) + : [] + + const billingByUser = new Map(billingRows.map((row) => [row.userId, row.billing])) + + const userRows = users.map((entry) => { + const workerStats = workerStatsByUser.get(entry.id) ?? { + workerCount: 0, + cloudWorkerCount: 0, + localWorkerCount: 0, + latestWorkerCreatedAt: null, + } + const sessionStats = sessionStatsByUser.get(entry.id) ?? { + sessionCount: 0, + lastSeenAt: null, + } + const authProviders = Array.from(providersByUser.get(entry.id) ?? 
[]).sort() + + return { + id: entry.id, + name: entry.name, + email: entry.email, + emailVerified: entry.emailVerified, + createdAt: entry.createdAt, + updatedAt: entry.updatedAt, + lastSeenAt: sessionStats.lastSeenAt, + sessionCount: sessionStats.sessionCount, + authProviders, + workerCount: workerStats.workerCount, + cloudWorkerCount: workerStats.cloudWorkerCount, + localWorkerCount: workerStats.localWorkerCount, + latestWorkerCreatedAt: workerStats.latestWorkerCreatedAt, + billing: includeBilling ? billingByUser.get(entry.id) ?? defaultBilling : null, + } + }) + + const summary = userRows.reduce( + (accumulator, entry) => { + accumulator.totalUsers += 1 + accumulator.totalWorkers += entry.workerCount + accumulator.cloudWorkers += entry.cloudWorkerCount + accumulator.localWorkers += entry.localWorkerCount + + if (entry.emailVerified) { + accumulator.verifiedUsers += 1 + } + + if (entry.workerCount > 0) { + accumulator.usersWithWorkers += 1 + } + + if (includeBilling && entry.billing) { + if (entry.billing.status === "paid") { + accumulator.paidUsers += 1 + } else if (entry.billing.status === "unpaid") { + accumulator.unpaidUsers += 1 + } else { + accumulator.billingUnavailableUsers += 1 + } + } + + if (isWithinDays(entry.createdAt, 7)) { + accumulator.recentUsers7d += 1 + } + + if (isWithinDays(entry.createdAt, 30)) { + accumulator.recentUsers30d += 1 + } + + return accumulator + }, + { + totalUsers: 0, + verifiedUsers: 0, + recentUsers7d: 0, + recentUsers30d: 0, + totalWorkers: 0, + cloudWorkers: 0, + localWorkers: 0, + usersWithWorkers: 0, + paidUsers: 0, + unpaidUsers: 0, + billingUnavailableUsers: 0, + }, + ) + + return c.json({ + viewer: { + id: user.id, + email: normalizeEmail(user.email), + name: user.name, + }, + admins, + summary: { + ...summary, + adminCount: admins.length, + billingLoaded: includeBilling, + paidUsers: includeBilling ? summary.paidUsers : null, + unpaidUsers: includeBilling ? 
summary.unpaidUsers : null, + billingUnavailableUsers: includeBilling ? summary.billingUnavailableUsers : null, + usersWithoutWorkers: summary.totalUsers - summary.usersWithWorkers, + }, + users: userRows, + generatedAt: new Date().toISOString(), + }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/auth/README.md b/ee/apps/den-api/src/routes/auth/README.md new file mode 100644 index 0000000000..01716c0fd1 --- /dev/null +++ b/ee/apps/den-api/src/routes/auth/README.md @@ -0,0 +1,25 @@ +# Auth Routes + +This folder owns authentication-related HTTP surfaces. + +## Files + +- `index.ts`: mounts Better Auth at `/api/auth/*` and registers auth-specific route groups +- `desktop-handoff.ts`: desktop sign-in handoff flow under `/v1/auth/desktop-handoff*` + +## Current responsibilities + +- forward Better Auth requests to `auth.handler(c.req.raw)` +- create short-lived desktop handoff grants +- exchange a valid handoff grant for a session token + +## Expected dependencies + +- Better Auth configuration from `src/auth.ts` +- shared auth/session middleware from `src/session.ts` +- request validation from `src/middleware/index.ts` + +## Notes for future work + +- Keep browser auth routes mounted through Better Auth unless there is a strong reason to wrap them +- Put new auth-adjacent custom endpoints in this folder, not in `me/` or `org/` diff --git a/ee/apps/den-api/src/routes/auth/desktop-handoff.ts b/ee/apps/den-api/src/routes/auth/desktop-handoff.ts new file mode 100644 index 0000000000..a2c6347a74 --- /dev/null +++ b/ee/apps/den-api/src/routes/auth/desktop-handoff.ts @@ -0,0 +1,281 @@ +import { randomBytes } from "node:crypto" +import { and, eq, gt, isNull } from "@openwork-ee/den-db/drizzle" +import { AuthSessionTable, AuthUserTable, DesktopHandoffGrantTable } from "@openwork-ee/den-db/schema" +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { 
jsonValidator, requireUserMiddleware } from "../../middleware/index.js" +import { db } from "../../db.js" +import { denTypeIdSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import type { AuthContextVariables } from "../../session.js" + +const createGrantSchema = z.object({ + next: z.string().trim().max(128).optional(), + desktopScheme: z.string().trim().max(32).optional(), +}) + +const exchangeGrantSchema = z.object({ + grant: z.string().trim().min(12).max(128), +}) + +const desktopHandoffGrantResponseSchema = z.object({ + grant: z.string(), + expiresAt: z.string().datetime(), + openworkUrl: z.string().url(), +}).meta({ ref: "DesktopHandoffGrantResponse" }) + +const desktopHandoffExchangeResponseSchema = z.object({ + token: z.string(), + user: z.object({ + id: denTypeIdSchema("user"), + email: z.string().email(), + name: z.string().nullable(), + }), +}).meta({ ref: "DesktopHandoffExchangeResponse" }) + +const grantNotFoundSchema = z.object({ + error: z.literal("grant_not_found"), + message: z.string(), +}).meta({ ref: "DesktopHandoffGrantNotFoundError" }) + +function readSingleHeader(value: string | null) { + const first = value?.split(",")[0]?.trim() ?? 
"" + return first || null +} + +function isWebAppHost(hostname: string) { + const normalized = hostname.trim().toLowerCase() + + if ( + normalized === "localhost" + || normalized === "0.0.0.0" + || normalized === "::1" + || normalized === "[::1]" + || /^127(?:\.\d{1,3}){3}$/.test(normalized) + ) { + return true + } + + const ipv4Match = normalized.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/) + if (ipv4Match) { + const [first, second, third, fourth] = ipv4Match.slice(1).map(Number) + const octets = [first, second, third, fourth] + if (octets.every((octet) => Number.isInteger(octet) && octet >= 0 && octet <= 255)) { + if ( + first === 10 + || first === 127 + || (first === 172 && second >= 16 && second <= 31) + || (first === 192 && second === 168) + || (first === 169 && second === 254) + || (first === 100 && second >= 64 && second <= 127) + ) { + return true + } + } + } + + return normalized === "app.openworklabs.com" + || normalized === "app.openwork.software" + || normalized.startsWith("app.") +} + +function withDenProxyPath(origin: string) { + const url = new URL(origin) + const pathname = url.pathname.replace(/\/+$/, "") + if (pathname.toLowerCase().endsWith("/api/den")) { + return url.toString().replace(/\/+$/, "") + } + url.pathname = `${pathname}/api/den`.replace(/\/+/g, "/") + return url.toString().replace(/\/+$/, "") +} + +function resolveDesktopDenBaseUrl(request: Request) { + const originHeader = readSingleHeader(request.headers.get("origin")) + if (originHeader) { + try { + const originUrl = new URL(originHeader) + if ((originUrl.protocol === "https:" || originUrl.protocol === "http:") && isWebAppHost(originUrl.hostname)) { + return withDenProxyPath(originUrl.origin) + } + } catch { + // Ignore invalid origins. 
+ } + } + + const forwardedProto = readSingleHeader(request.headers.get("x-forwarded-proto")) + const forwardedHost = readSingleHeader(request.headers.get("x-forwarded-host")) + const host = readSingleHeader(request.headers.get("host")) + const protocol = forwardedProto ?? new URL(request.url).protocol.replace(/:$/, "") + const targetHost = forwardedHost ?? host + if (!targetHost) { + return "https://app.openworklabs.com/api/den" + } + + const origin = `${protocol}://${targetHost}` + try { + const url = new URL(origin) + if (isWebAppHost(url.hostname)) { + return withDenProxyPath(url.origin) + } + } catch { + // Ignore invalid forwarded origins. + } + + return origin +} + +function buildOpenworkDeepLink(input: { + scheme?: string | null + grant: string + denBaseUrl: string +}) { + const requestedScheme = input.scheme?.trim() || "openwork" + const scheme = /^[a-z][a-z0-9+.-]*$/i.test(requestedScheme) + ? requestedScheme + : "openwork" + const url = new URL(`${scheme}://den-auth`) + url.searchParams.set("grant", input.grant) + url.searchParams.set("denBaseUrl", input.denBaseUrl) + return url.toString() +} + +export function registerDesktopAuthRoutes(app: Hono) { + app.post( + "/v1/auth/desktop-handoff", + describeRoute({ + hide: true, + tags: ["Authentication"], + summary: "Create desktop handoff grant", + description: "Creates a short-lived desktop handoff grant and deep link so a signed-in web user can continue the same account in the OpenWork desktop app.", + responses: { + 200: jsonResponse("Desktop handoff grant created successfully.", desktopHandoffGrantResponseSchema), + 400: jsonResponse("The handoff request body was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create a desktop handoff grant.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + jsonValidator(createGrantSchema), + async (c) => { + const user = c.get("user") + const session = c.get("session") + if (!user?.id || !session?.token) { + return 
c.json({ error: "unauthorized" }, 401) + } + + const input = c.req.valid("json") + + const grant = randomBytes(24).toString("base64url") + const expiresAt = new Date(Date.now() + 5 * 60 * 1000) + await db.insert(DesktopHandoffGrantTable).values({ + id: grant, + user_id: normalizeDenTypeId("user", user.id), + session_token: session.token, + expires_at: expiresAt, + consumed_at: null, + }) + + const denBaseUrl = resolveDesktopDenBaseUrl(c.req.raw) + + return c.json({ + grant, + expiresAt: expiresAt.toISOString(), + openworkUrl: buildOpenworkDeepLink({ + scheme: input.desktopScheme || "openwork", + grant, + denBaseUrl, + }), + }) + }, + ) + + app.post( + "/v1/auth/desktop-handoff/exchange", + describeRoute({ + hide: true, + tags: ["Authentication"], + summary: "Exchange desktop handoff grant", + description: "Exchanges a one-time desktop handoff grant for the user's session token and basic profile so the desktop app can sign the user in.", + responses: { + 200: jsonResponse("Desktop handoff grant exchanged successfully.", desktopHandoffExchangeResponseSchema), + 400: jsonResponse("The handoff exchange request body was invalid.", invalidRequestSchema), + 404: jsonResponse("The handoff grant was missing, expired, or already used.", grantNotFoundSchema), + }, + }), + jsonValidator(exchangeGrantSchema), + async (c) => { + const input = c.req.valid("json") + + const now = new Date() + const exchange = await db.transaction(async (tx) => { + const rows = await tx + .select({ + session: AuthSessionTable, + user: AuthUserTable, + }) + .from(DesktopHandoffGrantTable) + .innerJoin(AuthSessionTable, eq(DesktopHandoffGrantTable.session_token, AuthSessionTable.token)) + .innerJoin(AuthUserTable, eq(DesktopHandoffGrantTable.user_id, AuthUserTable.id)) + .where( + and( + eq(DesktopHandoffGrantTable.id, input.grant), + isNull(DesktopHandoffGrantTable.consumed_at), + gt(DesktopHandoffGrantTable.expires_at, now), + gt(AuthSessionTable.expiresAt, now), + ), + ) + .limit(1) + + const row 
= rows[0] + if (!row) { + return null + } + + const consumedAt = new Date() + await tx + .update(DesktopHandoffGrantTable) + .set({ consumed_at: consumedAt }) + .where( + and( + eq(DesktopHandoffGrantTable.id, input.grant), + isNull(DesktopHandoffGrantTable.consumed_at), + gt(DesktopHandoffGrantTable.expires_at, now), + ), + ) + + const claimed = await tx + .select({ id: DesktopHandoffGrantTable.id }) + .from(DesktopHandoffGrantTable) + .where( + and( + eq(DesktopHandoffGrantTable.id, input.grant), + eq(DesktopHandoffGrantTable.consumed_at, consumedAt), + ), + ) + .limit(1) + + if (!claimed[0]) { + return null + } + + return { + token: row.session.token, + user: { + id: row.user.id, + email: row.user.email, + name: row.user.name, + }, + } + }) + + if (!exchange) { + return c.json({ + error: "grant_not_found", + message: "This desktop sign-in link is missing, expired, or already used.", + }, 404) + } + + return c.json(exchange) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/auth/index.ts b/ee/apps/den-api/src/routes/auth/index.ts new file mode 100644 index 0000000000..5d51b76f68 --- /dev/null +++ b/ee/apps/den-api/src/routes/auth/index.ts @@ -0,0 +1,149 @@ +import { eq } from "@openwork-ee/den-db/drizzle" +import { OAuthClientTable } from "@openwork-ee/den-db/schema" +import type { Hono } from "hono" +import { oauthProviderAuthServerMetadata, oauthProviderOpenIdConfigMetadata } from "@better-auth/oauth-provider" +import { describeRoute } from "hono-openapi" +import { auth } from "../../auth.js" +import { db } from "../../db.js" +import { env } from "../../env.js" +import { emptyResponse } from "../../openapi.js" +import type { AuthContextVariables } from "../../session.js" +import { registerDesktopAuthRoutes } from "./desktop-handoff.js" + +function rewriteAuthRequest(request: Request, path: string) { + const url = new URL(request.url) + url.pathname = path + return new Request(url, request) +} + +async function rewriteMcpClientRegistrationRequest(request: 
Request, path: string) { + const url = new URL(request.url) + url.pathname = path + + const headers = new Headers(request.headers) + const contentType = headers.get("content-type")?.toLowerCase() ?? "" + if (!contentType.includes("application/json")) { + return new Request(url, request) + } + + const body = await request.json() as Record + const scope = typeof body.scope === "string" ? body.scope : "" + const scopes = new Set(scope.split(/\s+/).filter(Boolean)) + if (scopes.has("mcp:read") || scopes.has("mcp:write")) { + scopes.add("mcp:read") + scopes.add("mcp:write") + body.scope = Array.from(scopes).join(" ") + } + + headers.set("content-type", "application/json") + headers.delete("content-length") + + return new Request(url, { + method: request.method, + headers, + body: JSON.stringify(body), + }) +} + +async function rewriteMetadataOrigin(response: Response, origin: string) { + const metadata = await response.json() as Record + const headers = new Headers(response.headers) + headers.delete("content-length") + headers.set("content-type", "application/json") + + for (const [key, value] of Object.entries(metadata)) { + if (typeof value === "string") { + metadata[key] = value.replace(env.betterAuthUrl, origin) + } + } + + return new Response(JSON.stringify(metadata), { + status: response.status, + headers, + }) +} + +function requestOrigin(request: Request) { + return new URL(request.url).origin +} + +function readStoredClientScopes(scopes: string | null) { + if (!scopes) { + return [] + } + + try { + const parsed = JSON.parse(scopes) as unknown + if (Array.isArray(parsed)) return parsed.filter((entry): entry is string => typeof entry === "string") + } catch {} + + return scopes.split(/\s+/).filter(Boolean) +} + +async function ensureMcpClientScopes(request: Request) { + const url = new URL(request.url) + const requestedScopes = new Set((url.searchParams.get("scope") ?? 
"").split(/\s+/).filter(Boolean)) + if (!requestedScopes.has("mcp:read") && !requestedScopes.has("mcp:write")) { + return + } + + const clientId = url.searchParams.get("client_id") + if (!clientId) { + return + } + + const [client] = await db + .select({ scopes: OAuthClientTable.scopes }) + .from(OAuthClientTable) + .where(eq(OAuthClientTable.clientId, clientId)) + .limit(1) + if (!client) { + return + } + + const scopes = new Set(readStoredClientScopes(client.scopes)) + if (!scopes.has("mcp:read") && !scopes.has("mcp:write")) { + return + } + + scopes.add("mcp:read") + scopes.add("mcp:write") + await db + .update(OAuthClientTable) + .set({ scopes: JSON.stringify(Array.from(scopes)) }) + .where(eq(OAuthClientTable.clientId, clientId)) +} + +export function registerAuthRoutes(app: Hono) { + app.get("/api/auth/.well-known/oauth-authorization-server", async (c) => rewriteMetadataOrigin(await oauthProviderAuthServerMetadata(auth)(c.req.raw), requestOrigin(c.req.raw))) + app.get("/api/auth/.well-known/openid-configuration", async (c) => rewriteMetadataOrigin(await oauthProviderOpenIdConfigMetadata(auth)(c.req.raw), requestOrigin(c.req.raw))) + app.get("/.well-known/oauth-authorization-server/api/auth", async (c) => rewriteMetadataOrigin(await oauthProviderAuthServerMetadata(auth)(c.req.raw), requestOrigin(c.req.raw))) + app.get("/.well-known/openid-configuration/api/auth", async (c) => rewriteMetadataOrigin(await oauthProviderOpenIdConfigMetadata(auth)(c.req.raw), requestOrigin(c.req.raw))) + app.get("/.well-known/oauth-authorization-server", async (c) => rewriteMetadataOrigin(await oauthProviderAuthServerMetadata(auth)(rewriteAuthRequest(c.req.raw, "/api/auth/.well-known/oauth-authorization-server")), requestOrigin(c.req.raw))) + app.get("/.well-known/openid-configuration", async (c) => rewriteMetadataOrigin(await oauthProviderOpenIdConfigMetadata(auth)(rewriteAuthRequest(c.req.raw, "/api/auth/.well-known/openid-configuration")), requestOrigin(c.req.raw))) + 
app.post("/register", async (c) => auth.handler(await rewriteMcpClientRegistrationRequest(c.req.raw, "/api/auth/oauth2/register"))) + app.post("/api/auth/oauth2/register", async (c) => auth.handler(await rewriteMcpClientRegistrationRequest(c.req.raw, "/api/auth/oauth2/register"))) + app.get("/api/auth/oauth2/authorize", async (c) => { + await ensureMcpClientScopes(c.req.raw) + return auth.handler(c.req.raw) + }) + + app.on( + ["GET", "POST"], + "/api/auth/*", + describeRoute({ + hide: true, + tags: ["Authentication"], + summary: "Handle Better Auth flow", + description: "Proxies Better Auth sign-in, sign-out, session, and verification flows under the Den API auth namespace.", + responses: { + 200: emptyResponse("Better Auth handled the request successfully."), + 302: emptyResponse("Better Auth redirected the user to continue the auth flow."), + 400: emptyResponse("Better Auth rejected the request as invalid."), + 401: emptyResponse("Better Auth rejected the request because authentication failed."), + }, + }), + (c) => auth.handler(c.req.raw), + ) + registerDesktopAuthRoutes(app) +} diff --git a/ee/apps/den-api/src/routes/me/README.md b/ee/apps/den-api/src/routes/me/README.md new file mode 100644 index 0000000000..125806bb4b --- /dev/null +++ b/ee/apps/den-api/src/routes/me/README.md @@ -0,0 +1,23 @@ +# Me Routes + +This folder owns routes about the currently authenticated user. 
+ +## Files + +- `index.ts`: registers `/v1/me` and `/v1/me/orgs` + +## Current responsibilities + +- return the current authenticated user/session payload +- resolve the orgs the current user belongs to +- expose active org selection data for the current session + +## Middleware expectations + +- use `requireUserMiddleware` when a route needs an authenticated user +- use `resolveUserOrganizationsMiddleware` when a route needs org membership context + +## Notes for future work + +- Keep this folder focused on the current actor, not arbitrary user admin operations +- If more current-user subareas appear later, split them into additional files inside this folder diff --git a/ee/apps/den-api/src/routes/me/index.ts b/ee/apps/den-api/src/routes/me/index.ts new file mode 100644 index 0000000000..47ec968f4e --- /dev/null +++ b/ee/apps/den-api/src/routes/me/index.ts @@ -0,0 +1,99 @@ +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { desktopConfigSchema } from "@openwork/types/den/desktop-app-restrictions" +import { z } from "zod" +import { requireUserMiddleware, resolveOrganizationContextMiddleware, resolveUserOrganizationsMiddleware, type OrganizationContextVariables, type UserOrganizationsContext } from "../../middleware/index.js" +import { denTypeIdSchema, jsonResponse, unauthorizedSchema } from "../../openapi.js" +import { normalizeOrganizationMetadata } from "../../organization-limits.js" +import type { AuthContextVariables } from "../../session.js" + +const meResponseSchema = z.object({ + user: z.object({}).passthrough(), + session: z.object({}).passthrough(), +}).meta({ ref: "CurrentUserResponse" }) + +const meOrganizationsResponseSchema = z.object({ + orgs: z.array(z.object({ + id: denTypeIdSchema("organization"), + isActive: z.boolean(), + }).passthrough()), + activeOrgId: denTypeIdSchema("organization").nullable(), + activeOrgSlug: z.string().nullable(), +}).meta({ ref: "CurrentUserOrganizationsResponse" }) + +const 
meDesktopConfigResponseSchema = desktopConfigSchema.meta({ + ref: "CurrentUserDesktopConfigResponse", +}) + +export function registerMeRoutes & Partial }>(app: Hono) { + app.get( + "/v1/me", + describeRoute({ + tags: ["Users"], + summary: "Get current user", + description: "Returns the currently authenticated user and active session details for the caller.", + responses: { + 200: jsonResponse("Current user and session returned successfully.", meResponseSchema), + 401: jsonResponse("The caller must be signed in to read profile data.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + (c) => { + return c.json({ + user: c.get("user"), + session: c.get("session"), + }) + }, + ) + + app.get( + "/v1/me/orgs", + describeRoute({ + tags: ["Users"], + summary: "List current user's organizations", + description: "Lists the organizations visible to the current user and marks which organization is currently active.", + responses: { + 200: jsonResponse("Current user organizations returned successfully.", meOrganizationsResponseSchema), + }, + }), + resolveUserOrganizationsMiddleware, + (c) => { + const orgs = (c.get("userOrganizations") ?? []) as NonNullable + + return c.json({ + orgs: orgs.map((org) => ({ + ...org, + isActive: org.id === c.get("activeOrganizationId"), + })), + activeOrgId: c.get("activeOrganizationId") ?? null, + activeOrgSlug: c.get("activeOrganizationSlug") ?? 
null, + }) + }, + ) + + app.get( + "/v1/me/desktop-config", + describeRoute({ + tags: ["Users"], + summary: "Get current user's desktop config", + description: "Returns the authenticated desktop app restrictions for the caller's active organization.", + responses: { + 200: jsonResponse("Current user desktop config returned successfully.", meDesktopConfigResponseSchema), + 401: jsonResponse("The caller must be signed in to read desktop config.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + (c) => { + const organization = c.get("organizationContext").organization + const metadata = normalizeOrganizationMetadata(organization.metadata).metadata + + return c.json({ + ...organization.desktopAppRestrictions, + ...(Array.isArray(metadata.allowedDesktopVersions) + ? { allowedDesktopVersions: metadata.allowedDesktopVersions } + : {}), + }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/README.md b/ee/apps/den-api/src/routes/org/README.md new file mode 100644 index 0000000000..139936c943 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/README.md @@ -0,0 +1,41 @@ +# Org Routes + +This folder owns organization-facing Den API routes. + +## Files + +- `index.ts`: registers all org route groups +- `core.ts`: org creation, invitation preview/accept, and org context +- `invitations.ts`: invitation creation and cancellation +- `members.ts`: member role updates and member removal +- `roles.ts`: dynamic role CRUD +- `templates.ts`: shared template CRUD +- `shared.ts`: shared route-local helpers, param schemas, and guard helpers + +## Active organization model + +- `POST /api/auth/organization/set-active` is the only Better Auth endpoint that should switch the user's active org explicitly. +- New sessions should get an initial `activeOrganizationId` from Better Auth session creation hooks in `src/auth.ts`. 
+- `GET /v1/org` returns the active organization from the current session, including a nested `organization.owner` object plus the current member and team context. +- `POST /v1/org` creates a new organization and switches the session to it. `PATCH /v1/org` updates the active organization. +- Active-org scoped resources should prefer top-level routes like `/v1/skills`, `/v1/teams`, `/v1/roles`, `/v1/api-keys`, `/v1/llm-providers`, and plugin-system `/v1/...` routes. They should not require `:orgId` or `:orgSlug` in the path. +- Routes under `/v1/orgs/**` are reserved for cross-org flows that are not tied to the active workspace yet, such as invitation preview/accept. +- If a client needs to change workspaces, it should call Better Auth set-active first, then use the active-org scoped `/v1/...` resource routes. + +## Middleware expectations + +- `requireUserMiddleware`: the route requires a signed-in user +- `resolveOrganizationContextMiddleware`: the route needs the current org and member context +- `resolveMemberTeamsMiddleware`: the route needs the teams for the current org member + +Import these from `src/middleware/index.ts` so route files stay consistent. + +## Validation expectations + +- Query, JSON body, and params should use Hono Zod validators +- Route files should read validated input with `c.req.valid(...)` +- Avoid direct `c.req.param()`, `c.req.query()`, or manual `safeParse()` in route handlers + +## Why this is split up + +The org surface is the largest migrated area so far. Splitting by concern keeps edits small and lets agents change invitations, members, roles, or templates without scanning one giant router file. 
diff --git a/ee/apps/den-api/src/routes/org/api-keys.ts b/ee/apps/den-api/src/routes/org/api-keys.ts new file mode 100644 index 0000000000..87b1dc023a --- /dev/null +++ b/ee/apps/den-api/src/routes/org/api-keys.ts @@ -0,0 +1,328 @@ +import type { Hono } from "hono" +import { describeRoute, resolver } from "hono-openapi" +import { z } from "zod" +import { + buildOrganizationApiKeyMetadata, + deleteOrganizationApiKey, + DEN_API_KEY_RATE_LIMIT_MAX, + DEN_API_KEY_RATE_LIMIT_TIME_WINDOW_MS, + listOrganizationApiKeys, +} from "../../api-keys.js" +import { jsonValidator, paramValidator, requireUserMiddleware, resolveOrganizationContextMiddleware } from "../../middleware/index.js" +import { denTypeIdSchema } from "../../openapi.js" +import { auth } from "../../auth.js" +import type { OrgRouteVariables } from "./shared.js" +import { ensureApiKeyManager, idParamSchema } from "./shared.js" + +const createOrganizationApiKeySchema = z.object({ + name: z.string().trim().min(2).max(64), +}).meta({ ref: "CreateOrganizationApiKeyRequest" }) + +const validationIssueSchema = z.object({ + message: z.string(), + path: z.array(z.union([z.string(), z.number()])).optional(), +}).passthrough() + +const invalidRequestSchema = z.object({ + error: z.literal("invalid_request"), + details: z.array(validationIssueSchema), +}).meta({ ref: "InvalidRequestError" }) + +const unauthorizedSchema = z.object({ + error: z.literal("unauthorized"), +}).meta({ ref: "UnauthorizedError" }) + +const organizationNotFoundSchema = z.object({ + error: z.literal("organization_not_found"), +}).meta({ ref: "OrganizationNotFoundError" }) + +const forbiddenApiKeyManagerSchema = z.object({ + error: z.literal("forbidden"), + message: z.string(), +}).meta({ ref: "OrganizationApiKeyForbiddenError" }) + +const apiKeyNotFoundSchema = z.object({ + error: z.literal("api_key_not_found"), +}).meta({ ref: "OrganizationApiKeyNotFoundError" }) + +const apiKeyOwnerSchema = z.object({ + userId: denTypeIdSchema("user"), + memberId: 
denTypeIdSchema("member"), + name: z.string(), + email: z.string().email(), + image: z.string().nullable(), +}).meta({ ref: "OrganizationApiKeyOwner" }) + +const organizationApiKeySchema = z.object({ + id: z.string(), + configId: z.string(), + name: z.string().nullable(), + start: z.string().nullable(), + prefix: z.string().nullable(), + enabled: z.boolean(), + rateLimitEnabled: z.boolean(), + rateLimitMax: z.number().int().nullable(), + rateLimitTimeWindow: z.number().int().nullable(), + lastRequest: z.string().datetime().nullable(), + expiresAt: z.string().datetime().nullable(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), + owner: apiKeyOwnerSchema, +}).meta({ ref: "OrganizationApiKey" }) + +const organizationApiKeyListResponseSchema = z.object({ + apiKeys: z.array(organizationApiKeySchema), +}).meta({ ref: "OrganizationApiKeyListResponse" }) + +const createdOrganizationApiKeySchema = z.object({ + id: z.string(), + name: z.string().nullable(), + start: z.string().nullable(), + prefix: z.string().nullable(), + enabled: z.boolean(), + rateLimitEnabled: z.boolean(), + rateLimitMax: z.number().int().nullable(), + rateLimitTimeWindow: z.number().int().nullable(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), +}).meta({ ref: "CreatedOrganizationApiKey" }) + +const createOrganizationApiKeyResponseSchema = z.object({ + apiKey: createdOrganizationApiKeySchema, + key: z.string().min(1), +}).meta({ ref: "CreateOrganizationApiKeyResponse" }) + +const apiKeyIdParamSchema = idParamSchema("apiKeyId") +const hideApiKeyGenerationRoute = () => process.env.NODE_ENV === "production" + +export function registerOrgApiKeyRoutes(app: Hono) { + app.get( + "/v1/api-keys", + describeRoute({ + tags: ["API Keys"], + summary: "List organization API keys", + description: "Returns the API keys that belong to the selected organization.", + security: [{ bearerAuth: [] }], + responses: { + 200: { + description: "Organization API keys", + 
content: { + "application/json": { + schema: resolver(organizationApiKeyListResponseSchema), + }, + }, + }, + 400: { + description: "Invalid request", + content: { + "application/json": { + schema: resolver(invalidRequestSchema), + }, + }, + }, + 401: { + description: "Unauthorized", + content: { + "application/json": { + schema: resolver(unauthorizedSchema), + }, + }, + }, + 403: { + description: "Only workspace owners and admins can list API keys.", + content: { + "application/json": { + schema: resolver(forbiddenApiKeyManagerSchema), + }, + }, + }, + 404: { + description: "Organization not found", + content: { + "application/json": { + schema: resolver(organizationNotFoundSchema), + }, + }, + }, + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + async (c) => { + const access = ensureApiKeyManager(c) + if (!access.ok) { + return c.json(access.response, access.response.error === "forbidden" ? 403 : 404) + } + + const payload = c.get("organizationContext") + const apiKeys = await listOrganizationApiKeys(payload.organization.id) + return c.json({ apiKeys }) + }, + ) + + app.post( + "/v1/api-keys", + describeRoute({ + tags: ["API Keys"], + summary: "Create an organization API key", + description: "Creates a new API key for the selected organization.", + hide: hideApiKeyGenerationRoute, + security: [{ bearerAuth: [] }], + responses: { + 201: { + description: "Organization API key created", + content: { + "application/json": { + schema: resolver(createOrganizationApiKeyResponseSchema), + }, + }, + }, + 400: { + description: "Invalid request", + content: { + "application/json": { + schema: resolver(invalidRequestSchema), + }, + }, + }, + 401: { + description: "Unauthorized", + content: { + "application/json": { + schema: resolver(unauthorizedSchema), + }, + }, + }, + 403: { + description: "Only workspace owners and admins can create API keys.", + content: { + "application/json": { + schema: resolver(forbiddenApiKeyManagerSchema), + }, + }, + 
}, + 404: { + description: "Organization not found", + content: { + "application/json": { + schema: resolver(organizationNotFoundSchema), + }, + }, + }, + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(createOrganizationApiKeySchema), + async (c) => { + const access = ensureApiKeyManager(c) + if (!access.ok) { + return c.json(access.response, access.response.error === "forbidden" ? 403 : 404) + } + + const payload = c.get("organizationContext") + const input = c.req.valid("json") + const created = await auth.api.createApiKey({ + body: { + userId: payload.currentMember.userId, + name: input.name, + metadata: buildOrganizationApiKeyMetadata({ + organizationId: payload.organization.id, + orgMembershipId: payload.currentMember.id, + issuedByUserId: payload.currentMember.userId, + issuedByOrgMembershipId: payload.currentMember.id, + }), + rateLimitEnabled: true, + rateLimitMax: DEN_API_KEY_RATE_LIMIT_MAX, + rateLimitTimeWindow: DEN_API_KEY_RATE_LIMIT_TIME_WINDOW_MS, + }, + }) + + return c.json({ + apiKey: { + id: created.id, + name: created.name, + start: created.start, + prefix: created.prefix, + enabled: created.enabled, + rateLimitEnabled: created.rateLimitEnabled, + rateLimitMax: created.rateLimitMax, + rateLimitTimeWindow: created.rateLimitTimeWindow, + createdAt: created.createdAt, + updatedAt: created.updatedAt, + }, + key: created.key, + }, 201) + }, + ) + + app.delete( + "/v1/api-keys/:apiKeyId", + describeRoute({ + tags: ["API Keys"], + hide: true, + summary: "Delete an organization API key", + description: "Deletes an API key from the selected organization.", + security: [{ bearerAuth: [] }], + responses: { + 204: { + description: "Organization API key deleted", + }, + 400: { + description: "Invalid request", + content: { + "application/json": { + schema: resolver(invalidRequestSchema), + }, + }, + }, + 401: { + description: "Unauthorized", + content: { + "application/json": { + schema: resolver(unauthorizedSchema), 
+ }, + }, + }, + 403: { + description: "Only workspace owners and admins can delete API keys.", + content: { + "application/json": { + schema: resolver(forbiddenApiKeyManagerSchema), + }, + }, + }, + 404: { + description: "API key or organization not found", + content: { + "application/json": { + schema: resolver(z.union([organizationNotFoundSchema, apiKeyNotFoundSchema])), + }, + }, + }, + }, + }), + requireUserMiddleware, + paramValidator(apiKeyIdParamSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const access = ensureApiKeyManager(c) + if (!access.ok) { + return c.json(access.response, access.response.error === "forbidden" ? 403 : 404) + } + + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const deleted = await deleteOrganizationApiKey({ + organizationId: payload.organization.id, + apiKeyId: params.apiKeyId, + }) + + if (!deleted) { + return c.json({ error: "api_key_not_found" }, 404) + } + + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/core.ts b/ee/apps/den-api/src/routes/org/core.ts new file mode 100644 index 0000000000..3d9d5724a0 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/core.ts @@ -0,0 +1,395 @@ +import { eq } from "@openwork-ee/den-db/drizzle" +import { OrganizationTable } from "@openwork-ee/den-db/schema" +import { desktopAppRestrictionsSchema } from "@openwork/types/den/desktop-app-restrictions" +import { normalizeDenTypeId, type DenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { auth } from "../../auth.js" +import { requireCloudWorkerAccess } from "../../billing/polar.js" +import { db } from "../../db.js" +import { env } from "../../env.js" +import { jsonValidator, queryValidator, requireUserMiddleware, resolveMemberTeamsMiddleware, resolveOrganizationContextMiddleware } from "../../middleware/index.js" +import { denTypeIdSchema, 
forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import { + acceptInvitationForUser, + createOrganizationForUser, + getInvitationPreview, + normalizeAllowedEmailDomains, + OrganizationEmailDomainRestrictionError, + setSessionActiveOrganization, + updateOrganizationSettings, +} from "../../orgs.js" +import { getRequiredUserEmail } from "../../user.js" +import type { OrgRouteVariables } from "./shared.js" +import { ensureOwner } from "./shared.js" + +const createOrganizationSchema = z.object({ + name: z.string().trim().min(2).max(120), +}) + +const updateOrganizationSchema = z.object({ + name: z.string().trim().min(2).max(120).optional(), + allowedEmailDomains: z.array(z.string().trim().min(1).max(255)).max(100).nullable().optional(), + desktopAppRestrictions: desktopAppRestrictionsSchema.optional(), + allowedDesktopVersions: z.array(z.string().trim().min(1).max(32)).max(200).nullable().optional(), +}).refine((value) => value.name !== undefined || value.allowedEmailDomains !== undefined || value.desktopAppRestrictions !== undefined || value.allowedDesktopVersions !== undefined, { + message: "Provide at least one organization field to update.", +}) + +const invitationPreviewQuerySchema = z.object({ + id: denTypeIdSchema("invitation"), +}) + +const acceptInvitationSchema = z.object({ + id: denTypeIdSchema("invitation"), +}) + +const organizationResponseSchema = z.object({ + organization: z.object({}).passthrough().nullable(), +}).meta({ ref: "OrganizationResponse" }) + +const organizationOwnerSchema = z.object({ + memberId: denTypeIdSchema("member"), + userId: denTypeIdSchema("user"), + name: z.string().nullable(), + email: z.string().email().nullable(), + image: z.string().nullable().optional(), +}).meta({ ref: "OrganizationOwner" }) + +const paymentRequiredSchema = z.object({ + error: z.literal("payment_required"), + message: z.string(), + polar: z.object({ + checkoutUrl: z.string().nullable(), + 
productId: z.string().nullable().optional(), + benefitId: z.string().nullable().optional(), + }).passthrough(), +}).meta({ ref: "PaymentRequiredError" }) + +const invitationPreviewResponseSchema = z.object({}).passthrough().meta({ ref: "InvitationPreviewResponse" }) + +const invitationAcceptedResponseSchema = z.object({ + accepted: z.literal(true), + organizationId: denTypeIdSchema("organization"), + organizationSlug: z.string().nullable(), + invitationId: denTypeIdSchema("invitation"), +}).meta({ ref: "InvitationAcceptedResponse" }) + +const organizationContextResponseSchema = z.object({ + organization: z.object({ + owner: organizationOwnerSchema.nullable().optional(), + }).passthrough(), + currentMember: z.object({}).passthrough(), + currentMemberTeams: z.array(z.object({}).passthrough()), +}).passthrough().meta({ ref: "OrganizationContextResponse" }) + +const userEmailRequiredSchema = z.object({ + error: z.literal("user_email_required"), +}).meta({ ref: "UserEmailRequiredError" }) + +const invalidEmailDomainSchema = z.object({ + error: z.literal("invalid_email_domain"), + message: z.string(), + invalidDomains: z.array(z.string()), +}).meta({ ref: "InvalidEmailDomainError" }) + +const accountEmailDomainNotAllowedSchema = z.object({ + error: z.literal("account_email_domain_not_allowed"), + message: z.string(), + emailDomain: z.string().nullable(), + allowedEmailDomains: z.array(z.string()), +}).meta({ ref: "AccountEmailDomainNotAllowedError" }) + +function getStoredSessionId(session: { id?: string | null } | null) { + if (!session?.id) { + return null + } + + try { + return normalizeDenTypeId("session", session.id) + } catch { + return null + } +} + +async function setRequestActiveOrganization( + c: { + get: (key: "session") => { id?: string | null } | null + req: { raw: Request } + }, + organizationId: DenTypeId<"organization"> | null, +) { + try { + await auth.api.setActiveOrganization({ + body: { organizationId }, + headers: c.req.raw.headers, + }) + return + } 
catch {} + + const sessionId = getStoredSessionId(c.get("session")) + if (sessionId) { + await setSessionActiveOrganization(sessionId, organizationId) + } +} + +export function registerOrgCoreRoutes(app: Hono) { + app.post( + "/v1/org", + describeRoute({ + tags: ["Organizations"], + hide: true, + summary: "Create organization", + description: "Creates a new organization for the signed-in user after verifying that their account can provision OpenWork Cloud workspaces.", + responses: { + 201: jsonResponse("Organization created successfully.", organizationResponseSchema), + 400: jsonResponse("The organization creation request body was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create an organization.", unauthorizedSchema), + 402: jsonResponse("The caller needs an active cloud plan before creating an organization.", paymentRequiredSchema), + 403: jsonResponse("API keys cannot create organizations.", forbiddenSchema), + }, + }), + requireUserMiddleware, + jsonValidator(createOrganizationSchema), + async (c) => { + if (c.get("apiKey")) { + return c.json({ + error: "forbidden", + message: "API keys cannot create organizations.", + }, 403) + } + + const user = c.get("user") + const input = c.req.valid("json") + const email = getRequiredUserEmail(user) + + if (!email) { + return c.json({ error: "user_email_required" }, 400) + } + + const access = await requireCloudWorkerAccess({ + userId: normalizeDenTypeId("user", user.id), + email, + name: user.name ?? user.email ?? 
"OpenWork User", + }) + + if (!access.allowed) { + return c.json({ + error: "payment_required", + message: "Creating a workspace requires an active OpenWork Cloud plan.", + polar: { + checkoutUrl: access.checkoutUrl, + productId: env.polar.productId, + benefitId: env.polar.benefitId, + }, + }, 402) + } + + const organizationId = await createOrganizationForUser({ + userId: normalizeDenTypeId("user", user.id), + name: input.name, + }) + + await setRequestActiveOrganization(c, organizationId) + + const organization = await db + .select() + .from(OrganizationTable) + .where(eq(OrganizationTable.id, organizationId)) + .limit(1) + + return c.json({ organization: organization[0] ?? null }, 201) + }, + ) + + app.get( + "/v1/orgs/invitations/preview", + describeRoute({ + tags: ["Invitations"], + summary: "Preview organization invitation", + description: "Returns invitation preview details so a user can inspect an organization invite before accepting it.", + responses: { + 200: jsonResponse("Invitation preview returned successfully.", invitationPreviewResponseSchema), + 400: jsonResponse("The invitation preview query parameters were invalid.", invalidRequestSchema), + 404: jsonResponse("The invitation could not be found.", notFoundSchema), + }, + }), + queryValidator(invitationPreviewQuerySchema), + async (c) => { + const query = c.req.valid("query") + const invitation = await getInvitationPreview(query.id) + + if (!invitation) { + return c.json({ error: "invitation_not_found" }, 404) + } + + return c.json(invitation) + }, + ) + + app.post( + "/v1/orgs/invitations/accept", + describeRoute({ + tags: ["Invitations"], + summary: "Accept organization invitation", + description: "Accepts an organization invitation for the current signed-in user and switches their active organization to the accepted workspace.", + responses: { + 200: jsonResponse("Invitation accepted successfully.", invitationAcceptedResponseSchema), + 400: jsonResponse("The invitation acceptance request body was 
invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to accept an invitation.", unauthorizedSchema), + 403: jsonResponse("API keys cannot accept organization invitations.", forbiddenSchema), + 409: jsonResponse("The current account email is not allowed to join this organization.", accountEmailDomainNotAllowedSchema), + 404: jsonResponse("The invitation could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + jsonValidator(acceptInvitationSchema), + async (c) => { + if (c.get("apiKey")) { + return c.json({ + error: "forbidden", + message: "API keys cannot accept organization invitations.", + }, 403) + } + + const user = c.get("user") + const input = c.req.valid("json") + const email = getRequiredUserEmail(user) + + if (!email) { + return c.json({ error: "user_email_required" }, 400) + } + + let accepted + try { + accepted = await acceptInvitationForUser({ + userId: normalizeDenTypeId("user", user.id), + email, + invitationId: input.id, + }) + } catch (error) { + if (error instanceof OrganizationEmailDomainRestrictionError) { + return c.json({ + error: "account_email_domain_not_allowed", + message: error.message, + emailDomain: error.emailDomain, + allowedEmailDomains: error.allowedEmailDomains, + }, 409) + } + throw error + } + + if (!accepted) { + return c.json({ error: "invitation_not_found" }, 404) + } + + await setRequestActiveOrganization(c, accepted.member.organizationId) + + const orgRows = await db + .select({ slug: OrganizationTable.slug }) + .from(OrganizationTable) + .where(eq(OrganizationTable.id, accepted.member.organizationId)) + .limit(1) + + return c.json({ + accepted: true, + organizationId: accepted.member.organizationId, + organizationSlug: orgRows[0]?.slug ?? 
null, + invitationId: accepted.invitation.id, + }) + }, + ) + + app.patch( + "/v1/org", + describeRoute({ + tags: ["Organizations"], + summary: "Update organization", + description: "Updates organization fields that workspace owners are allowed to change, including the display name, allowed invitation email domains, and desktop app restrictions. The slug is immutable to avoid breaking dashboard URLs.", + responses: { + 200: jsonResponse("Organization updated successfully.", organizationResponseSchema), + 400: jsonResponse("The organization update request body was invalid or contained malformed email domains.", invalidEmailDomainSchema), + 401: jsonResponse("The caller must be signed in to update an organization.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners can update the organization.", forbiddenSchema), + 404: jsonResponse("The organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(updateOrganizationSchema), + async (c) => { + const permission = ensureOwner(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const input = c.req.valid("json") + + const normalizedDomains = input.allowedEmailDomains === undefined + ? 
{ domains: undefined, invalidDomains: [] as string[] } + : normalizeAllowedEmailDomains(input.allowedEmailDomains) + + if (normalizedDomains.invalidDomains.length > 0) { + return c.json({ + error: "invalid_email_domain", + message: "Enter valid email domains like company.com.", + invalidDomains: normalizedDomains.invalidDomains, + }, 400) + } + + const updated = await updateOrganizationSettings({ + organizationId: payload.organization.id, + name: input.name, + allowedEmailDomains: normalizedDomains.domains, + desktopAppRestrictions: input.desktopAppRestrictions, + allowedDesktopVersions: input.allowedDesktopVersions, + }) + + if (!updated) { + return c.json({ error: "organization_not_found" }, 404) + } + + return c.json({ organization: updated }) + }, + ) + + app.get( + "/v1/org", + describeRoute({ + tags: ["Organizations"], + summary: "Get active organization", + description: "Returns the active organization from the current session, including its owner, the current member record, and their team memberships.", + responses: { + 200: jsonResponse("Organization context returned successfully.", organizationContextResponseSchema), + 401: jsonResponse("The caller must be signed in to load organization context.", unauthorizedSchema), + 404: jsonResponse("The organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + resolveMemberTeamsMiddleware, + (c) => { + const payload = c.get("organizationContext") + const owner = payload.members.find((member: typeof payload.members[number]) => member.isOwner) ?? null + + return c.json({ + ...payload, + organization: { + ...payload.organization, + owner: owner + ? { + memberId: owner.id, + userId: owner.user.id, + name: owner.user.name, + email: owner.user.email, + image: owner.user.image, + } + : null, + }, + currentMemberTeams: c.get("memberTeams") ?? 
[],
      })
    },
  )
}

// ee/apps/den-api/src/routes/org/index.ts

import type { Hono } from "hono"
import { registerOrgApiKeyRoutes } from "./api-keys.js"
import { LEGACY_ORG_PROXY_HEADER } from "../../middleware/user-organizations.js"
import type { OrgRouteVariables } from "./shared.js"
import { registerOrgCoreRoutes } from "./core.js"
import { registerOrgInvitationRoutes } from "./invitations.js"
import { registerOrgLlmProviderRoutes } from "./llm-providers.js"
import { registerOrgMemberRoutes } from "./members.js"
import { registerPluginArchRoutes } from "./plugin-system/routes.js"
import { registerOrgRoleRoutes } from "./roles.js"
import { registerOrgSkillRoutes } from "./skills.js"
import { registerOrgTeamRoutes } from "./teams.js"

const LEGACY_ORG_PATH_PREFIX = "/v1/orgs/"

/**
 * Parses a legacy `/v1/orgs/:orgId/...` pathname into its proxy target.
 *
 * Returns `{ organizationId, targetPath }` when the path carries a non-empty
 * `org_`-prefixed organization segment followed by a sub-path, otherwise null.
 */
function extractLegacyOrgProxyTarget(pathname: string) {
  if (!pathname.startsWith(LEGACY_ORG_PATH_PREFIX)) {
    return null
  }

  const rest = pathname.slice(LEGACY_ORG_PATH_PREFIX.length)
  const separator = rest.indexOf("/")
  // Require both a non-empty org segment (separator > 0) and a trailing path.
  if (separator <= 0) {
    return null
  }

  const organizationId = rest.slice(0, separator)
  if (!organizationId.startsWith("org_")) {
    return null
  }

  const targetPath = `/v1${rest.slice(separator)}`
  // Defensive: never rewrite a request onto its own path (would loop).
  if (targetPath === pathname) {
    return null
  }

  return { organizationId, targetPath }
}

/**
 * Mounts every org-scoped route group, then installs a compatibility handler
 * that replays legacy `/v1/orgs/:orgId/*` requests against the modern `/v1/*`
 * routes with the organization id carried in a proxy header.
 */
// NOTE(review): Hono type parameters were reconstructed from the imported
// OrgRouteVariables — confirm against the original signature.
export function registerOrgRoutes(app: Hono<{ Variables: OrgRouteVariables }>) {
  registerOrgCoreRoutes(app)
  registerOrgApiKeyRoutes(app)
  registerOrgInvitationRoutes(app)
  registerOrgLlmProviderRoutes(app)
  registerOrgMemberRoutes(app)
  registerPluginArchRoutes(app)
  registerOrgRoleRoutes(app)
  registerOrgSkillRoutes(app)
  registerOrgTeamRoutes(app)

  app.all("/v1/orgs/:orgId/*", async (c) => {
    const requestUrl = new URL(c.req.raw.url)
    const target = extractLegacyOrgProxyTarget(requestUrl.pathname)
    if (!target) {
      return c.json({ error: "not_found" }, 404)
    }

    const rewrittenUrl = new URL(requestUrl)
    rewrittenUrl.pathname = target.targetPath

    // Carry the org id out-of-band so downstream middleware can resolve it.
    const headers = new Headers(c.req.raw.headers)
    headers.set(LEGACY_ORG_PROXY_HEADER, target.organizationId)

    // Inner Request rebinds the URL while keeping method/body from the
    // original; the outer one swaps in the augmented headers.
    const proxied = new Request(new Request(rewrittenUrl, c.req.raw), { headers })

    return app.fetch(proxied, c.env)
  })
}

// ee/apps/den-api/src/routes/org/invitations.ts

import { and, eq, gt } from "@openwork-ee/den-db/drizzle"
import { AuthUserTable, InvitationTable, MemberTable } from "@openwork-ee/den-db/schema"
import { normalizeDenTypeId } from "@openwork-ee/utils/typeid"
import type { Hono } from "hono"
import { describeRoute } from "hono-openapi"
import { z } from "zod"
import { db } from "../../db.js"
import { DenEmailSendError, sendDenOrganizationInvitationEmail } from "../../email.js"
import { jsonValidator, paramValidator, requireUserMiddleware, resolveOrganizationContextMiddleware } from "../../middleware/index.js"
import { denTypeIdSchema, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, successSchema, unauthorizedSchema } from "../../openapi.js"
import { getOrganizationLimitStatus } from "../../organization-limits.js"
import { isEmailAllowedForOrganization, listAssignableRoles } from "../../orgs.js"
import type { OrgRouteVariables } from "./shared.js"
import { buildInvitationLink, createInvitationId, ensureInviteManager, idParamSchema, normalizeRoleName } from "./shared.js"

// Request body for creating/refreshing an invitation.
const inviteMemberSchema = z.object({
  email: z.string().email(),
  role: z.string().trim().min(1).max(64),
})

const invitationResponseSchema = z.object({
  invitationId: denTypeIdSchema("invitation"),
  email: z.string().email(),
  role:
z.string(), + expiresAt: z.string().datetime(), +}).meta({ ref: "InvitationResponse" }) + +const invitationEmailFailedSchema = z.object({ + error: z.literal("invitation_email_failed"), + reason: z.enum(["loops_not_configured", "loops_rejected", "loops_network"]), + message: z.string(), + invitationId: denTypeIdSchema("invitation"), +}).meta({ ref: "InvitationEmailFailedError" }) + +const inviteEmailDomainNotAllowedSchema = z.object({ + error: z.literal("invite_email_domain_not_allowed"), + message: z.string(), + emailDomain: z.string().nullable(), + allowedEmailDomains: z.array(z.string()), +}).meta({ ref: "InviteEmailDomainNotAllowedError" }) + +type InvitationId = typeof InvitationTable.$inferSelect.id + +const orgInvitationParamsSchema = idParamSchema("invitationId", "invitation") + +export function registerOrgInvitationRoutes(app: Hono) { + app.post( + "/v1/invitations", + describeRoute({ + tags: ["Invitations"], + summary: "Create organization invitation", + description: "Creates or refreshes a pending organization invitation for an email address and sends the invite email. 
Returns 502 when the invitation row is persisted but the email provider (Loops) failed to send; the client should surface the error and give the user a retry affordance.", + responses: { + 200: jsonResponse("Existing invitation refreshed successfully.", invitationResponseSchema), + 201: jsonResponse("Invitation created successfully.", invitationResponseSchema), + 400: jsonResponse("The invitation request body or path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to invite organization members.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners and admins can create or resend invitations.", forbiddenSchema), + 404: jsonResponse("The organization could not be found.", notFoundSchema), + 409: jsonResponse("The email address is outside this workspace's allowed domains.", inviteEmailDomainNotAllowedSchema), + 502: jsonResponse("The invitation was saved but the email provider (Loops) rejected or failed to deliver it. Retry by submitting the same email again.", invitationEmailFailedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(inviteMemberSchema), + async (c) => { + const permission = ensureInviteManager(c) + if (!permission.ok) { + return c.json(permission.response, permission.response.error === "forbidden" ? 403 : 404) + } + + const payload = c.get("organizationContext") + const user = c.get("user") + const input = c.req.valid("json") + + const email = input.email.trim().toLowerCase() + if (!isEmailAllowedForOrganization(payload.organization.allowedEmailDomains, email)) { + const emailDomain = email.includes("@") ? email.slice(email.lastIndexOf("@") + 1) : null + return c.json({ + error: "invite_email_domain_not_allowed", + message: + payload.organization.allowedEmailDomains && payload.organization.allowedEmailDomains.length === 1 + ? 
`This workspace only allows ${payload.organization.allowedEmailDomains[0]} email addresses.` + : `This workspace only allows email addresses from these domains: ${(payload.organization.allowedEmailDomains ?? []).join(", ")}.`, + emailDomain, + allowedEmailDomains: payload.organization.allowedEmailDomains ?? [], + }, 409) + } + + const availableRoles = await listAssignableRoles(payload.organization.id) + const role = normalizeRoleName(input.role) + if (!availableRoles.has(role)) { + return c.json({ error: "invalid_role", message: "Choose one of the existing organization roles." }, 400) + } + + const existingMembers = await db + .select({ id: MemberTable.id }) + .from(MemberTable) + .innerJoin(AuthUserTable, eq(MemberTable.userId, AuthUserTable.id)) + .where(and(eq(MemberTable.organizationId, payload.organization.id), eq(AuthUserTable.email, email))) + .limit(1) + + if (existingMembers[0]) { + return c.json({ + error: "member_exists", + message: "That email address is already a member of this organization.", + }, 409) + } + + const existingInvitation = await db + .select() + .from(InvitationTable) + .where( + and( + eq(InvitationTable.organizationId, payload.organization.id), + eq(InvitationTable.email, email), + eq(InvitationTable.status, "pending"), + gt(InvitationTable.expiresAt, new Date()), + ), + ) + .limit(1) + + if (!existingInvitation[0]) { + const memberLimit = await getOrganizationLimitStatus(payload.organization.id, "members") + if (memberLimit.exceeded) { + return c.json({ + error: "org_limit_reached", + limitType: "members", + limit: memberLimit.limit, + currentCount: memberLimit.currentCount, + message: `This workspace currently supports up to ${memberLimit.limit} members. Contact support to increase the limit.`, + }, 409) + } + } + + const expiresAt = new Date(Date.now() + 1000 * 60 * 60 * 24 * 7) + const invitationId = existingInvitation[0]?.id ?? 
createInvitationId() + + if (existingInvitation[0]) { + await db + .update(InvitationTable) + .set({ role, inviterId: normalizeDenTypeId("user", user.id), expiresAt }) + .where(eq(InvitationTable.id, existingInvitation[0].id)) + } else { + await db.insert(InvitationTable).values({ + id: invitationId, + organizationId: payload.organization.id, + email, + role, + status: "pending", + inviterId: normalizeDenTypeId("user", user.id), + expiresAt, + }) + } + + try { + await sendDenOrganizationInvitationEmail({ + email, + inviteLink: buildInvitationLink(invitationId), + invitedByName: user.name ?? user.email ?? "OpenWork", + invitedByEmail: user.email ?? "", + organizationName: payload.organization.name, + role, + }) + } catch (error) { + if (error instanceof DenEmailSendError) { + // The invitation row is already persisted (step above). Log at error + // level so operators can grep, and return a 502 so the caller can + // render a real failure instead of a silent success. The invitation + // id is included so the UI can correlate and offer a direct retry. + console.error( + `[auth][invite_email_failed] organization=${payload.organization.id} invitation=${invitationId} email=${email} reason=${error.reason}${error.detail ? ` detail=${error.detail}` : ""}`, + ) + + return c.json({ + error: "invitation_email_failed" as const, + reason: error.reason, + message: + error.reason === "loops_not_configured" + ? "The invitation email provider (Loops) is not configured on this deployment." + : error.reason === "loops_network" + ? "Could not reach the invitation email provider. The invitation is saved; retry to send again." + : `The invitation email provider rejected the send${error.detail ? `: ${error.detail}` : "."}`, + invitationId, + }, 502) + } + + throw error + } + + return c.json({ invitationId, email, role, expiresAt }, existingInvitation[0] ? 
200 : 201) + }, + ) + + app.post( + "/v1/invitations/:invitationId/cancel", + describeRoute({ + tags: ["Invitations"], + summary: "Cancel organization invitation", + description: "Cancels a pending organization invitation so the invite link can no longer be used.", + responses: { + 200: jsonResponse("Invitation cancelled successfully.", successSchema), + 400: jsonResponse("The invitation cancellation path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to cancel invitations.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners and admins can cancel invitations.", forbiddenSchema), + 404: jsonResponse("The invitation or organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgInvitationParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const permission = ensureInviteManager(c) + if (!permission.ok) { + return c.json(permission.response, permission.response.error === "forbidden" ? 
403 : 404) + } + + const payload = c.get("organizationContext") + const params = c.req.valid("param") + let invitationId: InvitationId + try { + invitationId = normalizeDenTypeId("invitation", params.invitationId) + } catch { + return c.json({ error: "invitation_not_found" }, 404) + } + + const invitationRows = await db + .select({ id: InvitationTable.id }) + .from(InvitationTable) + .where(and(eq(InvitationTable.id, invitationId), eq(InvitationTable.organizationId, payload.organization.id))) + .limit(1) + + if (!invitationRows[0]) { + return c.json({ error: "invitation_not_found" }, 404) + } + + await db.update(InvitationTable).set({ status: "canceled" }).where(eq(InvitationTable.id, invitationId)) + return c.json({ success: true }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/llm-providers.ts b/ee/apps/den-api/src/routes/org/llm-providers.ts new file mode 100644 index 0000000000..7b8f7efd92 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/llm-providers.ts @@ -0,0 +1,1041 @@ +import { and, desc, eq, inArray, isNotNull, or } from "@openwork-ee/den-db/drizzle" +import { + AuthUserTable, + LlmProviderAccessTable, + LlmProviderModelTable, + LlmProviderTable, + MemberTable, + TeamTable, +} from "@openwork-ee/den-db/schema" +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { + jsonValidator, + paramValidator, + requireUserMiddleware, + resolveMemberTeamsMiddleware, + resolveOrganizationContextMiddleware, +} from "../../middleware/index.js" +import { getModelsDevProvider, listModelsDevProviders } from "../../llm/models-dev.js" +import type { MemberTeamsContext } from "../../middleware/member-teams.js" +import { denTypeIdSchema, emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import type { 
OrgRouteVariables } from "./shared.js" +import { idParamSchema, memberHasRole } from "./shared.js" + +type JsonRecord = Record +type LlmProviderId = typeof LlmProviderTable.$inferSelect.id +type LlmProviderAccessId = typeof LlmProviderAccessTable.$inferSelect.id +type MemberId = typeof MemberTable.$inferSelect.id +type TeamId = typeof TeamTable.$inferSelect.id +type LlmProviderRow = typeof LlmProviderTable.$inferSelect + +type RouteFailure = { + status: number + error: string + message?: string +} + +const providerCatalogParamsSchema = z.object({ + providerId: z.string().trim().min(1).max(255), +}) + +const orgLlmProviderParamsSchema = idParamSchema("llmProviderId", "llmProvider") + +const customModelSchema = z.object({ + id: z.string().trim().min(1).max(255), + name: z.string().trim().min(1).max(255), +}).passthrough() + +const customProviderSchema = z.object({ + id: z.string().trim().min(1).max(255), + name: z.string().trim().min(1).max(255), + npm: z.string().trim().min(1).max(255), + env: z.array(z.string().trim().min(1).max(255)).min(1), + doc: z.string().trim().min(1).max(2048), + api: z.string().trim().min(1).max(2048).optional(), + models: z.array(customModelSchema).min(1), +}).passthrough() + +const llmProviderWriteSchema = z.object({ + name: z.string().trim().min(1).max(255), + source: z.enum(["models_dev", "custom"]), + providerId: z.string().trim().min(1).max(255).optional(), + modelIds: z.array(z.string().trim().min(1).max(255)).min(1).optional(), + customConfigText: z.string().trim().min(1).optional(), + apiKey: z.string().trim().max(65535).optional(), + memberIds: z.array(denTypeIdSchema("member")).max(500).optional().default([]), + teamIds: z.array(denTypeIdSchema("team")).max(500).optional().default([]), +}).superRefine((value, ctx) => { + if (value.source === "models_dev") { + if (!value.providerId) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["providerId"], + message: "Select a provider.", + }) + } + + if (!value.modelIds?.length) { 
+ ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["modelIds"], + message: "Select at least one model.", + }) + } + } + + if (value.source === "custom" && !value.customConfigText) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["customConfigText"], + message: "Paste a custom provider config.", + }) + } +}) + +const providerCatalogListResponseSchema = z.object({ + providers: z.array(z.object({}).passthrough()), +}).meta({ ref: "LlmProviderCatalogListResponse" }) + +const providerCatalogResponseSchema = z.object({ + provider: z.object({}).passthrough(), +}).meta({ ref: "LlmProviderCatalogResponse" }) + +const llmProviderListResponseSchema = z.object({ + llmProviders: z.array(z.object({}).passthrough()), +}).meta({ ref: "LlmProviderListResponse" }) + +const llmProviderResponseSchema = z.object({ + llmProvider: z.object({}).passthrough(), +}).meta({ ref: "LlmProviderResponse" }) + +const providerCatalogUnavailableSchema = z.object({ + error: z.literal("provider_catalog_unavailable"), + message: z.string(), +}).meta({ ref: "ProviderCatalogUnavailableError" }) + +const conflictSchema = z.object({ + error: z.string(), + message: z.string().optional(), +}).meta({ ref: "ConflictError" }) + +function createFailure(status: number, error: string, message?: string): RouteFailure { + return { status, error, message } +} + +function isRouteFailure(value: unknown): value is RouteFailure { + return typeof value === "object" && value !== null && "status" in value && "error" in value +} + +function isOrganizationAdmin(payload: { currentMember: { isOwner: boolean; role: string } }) { + return payload.currentMember.isOwner || memberHasRole(payload.currentMember.role, "admin") +} + +function canManageLlmProvider( + payload: { currentMember: { id: MemberId; isOwner: boolean; role: string } }, + provider: LlmProviderRow, +) { + return isOrganizationAdmin(payload) || provider.createdByOrgMembershipId === payload.currentMember.id +} + +async function 
canAccessLlmProvider(input: {
  organizationId: typeof LlmProviderTable.$inferSelect.organizationId
  llmProviderId: LlmProviderId
  currentMemberId: MemberId
  memberTeams: Array<{ id: TeamId }>
  isAdmin: boolean
}) {
  // Admins implicitly see every provider; others need an access grant,
  // either directly on their membership or through one of their teams.
  if (input.isAdmin) {
    return true
  }

  const grants = await listAccessibleProviderAccess({
    organizationId: input.organizationId,
    currentMemberId: input.currentMemberId,
    memberTeams: input.memberTeams,
  })

  return grants.some((grant) => grant.llmProviderId === input.llmProviderId)
}

function parseLlmProviderId(value: string) {
  return normalizeDenTypeId("llmProvider", value)
}

function parseLlmProviderAccessId(value: string) {
  return normalizeDenTypeId("llmProviderAccess", value)
}

function parseMemberId(value: string) {
  return normalizeDenTypeId("member", value)
}

function parseTeamId(value: string) {
  return normalizeDenTypeId("team", value)
}

/**
 * Lists every access-grant row (member-scoped or team-scoped) that makes a
 * provider in this organization visible to the current member.
 */
async function listAccessibleProviderAccess(input: {
  organizationId: typeof LlmProviderTable.$inferSelect.organizationId
  currentMemberId: MemberId
  memberTeams: Array<{ id: TeamId }>
}) {
  const teamIds = input.memberTeams.map((team) => team.id)
  // Only include the team branch of the OR when the member is on any team.
  const accessWhere = teamIds.length > 0
    ? and(
        eq(LlmProviderTable.organizationId, input.organizationId),
        or(
          eq(LlmProviderAccessTable.orgMembershipId, input.currentMemberId),
          inArray(LlmProviderAccessTable.teamId, teamIds),
        ),
      )
    : and(
        eq(LlmProviderTable.organizationId, input.organizationId),
        eq(LlmProviderAccessTable.orgMembershipId, input.currentMemberId),
      )

  return db
    .select({
      id: LlmProviderAccessTable.id,
      llmProviderId: LlmProviderAccessTable.llmProviderId,
      orgMembershipId: LlmProviderAccessTable.orgMembershipId,
      teamId: LlmProviderAccessTable.teamId,
      createdAt: LlmProviderAccessTable.createdAt,
    })
    .from(LlmProviderAccessTable)
    .innerJoin(LlmProviderTable, eq(LlmProviderAccessTable.llmProviderId, LlmProviderTable.id))
    .where(accessWhere)
}

/**
 * Validates and normalizes raw member-id strings, confirming every id belongs
 * to this organization. Throws RouteFailure(404, "member_not_found") on any
 * malformed or foreign id.
 */
async function resolveMemberIds(input: {
  organizationId: typeof LlmProviderTable.$inferSelect.organizationId
  values: string[]
}) {
  const distinct = [...new Set(input.values)]
  if (distinct.length === 0) {
    return [] as MemberId[]
  }

  const memberIds = distinct.map((value) => {
    try {
      return parseMemberId(value)
    } catch {
      throw createFailure(404, "member_not_found")
    }
  })

  const rows = await db
    .select({ id: MemberTable.id })
    .from(MemberTable)
    .where(and(eq(MemberTable.organizationId, input.organizationId), inArray(MemberTable.id, memberIds)))

  // Count mismatch means at least one id did not resolve inside this org.
  if (rows.length !== memberIds.length) {
    throw createFailure(404, "member_not_found")
  }

  return memberIds
}

/**
 * Same contract as resolveMemberIds, but for team ids.
 * Throws RouteFailure(404, "team_not_found") on any malformed or foreign id.
 */
async function resolveTeamIds(input: {
  organizationId: typeof LlmProviderTable.$inferSelect.organizationId
  values: string[]
}) {
  const distinct = [...new Set(input.values)]
  if (distinct.length === 0) {
    return [] as TeamId[]
  }

  const teamIds = distinct.map((value) => {
    try {
      return parseTeamId(value)
    } catch {
      throw createFailure(404, "team_not_found")
    }
  })

  const rows = await db
    .select({ id: TeamTable.id })
    .from(TeamTable)
    .where(and(eq(TeamTable.organizationId, input.organizationId), inArray(TeamTable.id, teamIds)))

  if (rows.length !== teamIds.length) {
    throw createFailure(404, "team_not_found")
  }

  return teamIds
}

/**
 * Turns a validated write request into a canonical provider payload.
 *
 * models_dev source: resolves the provider and each requested model against
 * the models.dev catalog (404 RouteFailure when missing). custom source:
 * parses and validates the pasted JSON config (400 RouteFailure when invalid).
 */
// NOTE(review): z.infer type argument reconstructed — confirm against original.
async function normalizeLlmProviderInput(input: z.infer<typeof llmProviderWriteSchema>) {
  if (input.source === "models_dev") {
    const provider = await getModelsDevProvider(input.providerId ?? "")
    if (!provider) {
      throw createFailure(404, "provider_not_found", "The selected provider was not found in models.dev.")
    }

    const requestedModelIds = [...new Set(input.modelIds ?? [])]
    const modelsById = new Map(provider.models.map((model) => [model.id, model]))
    const models = requestedModelIds.map((modelId) => {
      const model = modelsById.get(modelId)
      if (!model) {
        throw createFailure(404, "model_not_found", `Model ${modelId} is not available for ${provider.name}.`)
      }
      return model
    })

    // Empty-after-trim keys are stored as null (no secret).
    const apiKey = input.apiKey?.trim() || null

    return {
      source: input.source,
      providerId: provider.id,
      name: input.name,
      providerConfig: provider.config,
      models: models.map((model) => ({
        id: model.id,
        name: model.name,
        config: model.config,
      })),
      apiKey,
    }
  }

  let parsed: unknown
  try {
    parsed = JSON.parse(input.customConfigText ?? "")
  } catch {
    throw createFailure(400, "invalid_custom_provider_config", "Custom provider config must be valid JSON.")
  }

  const customProvider = customProviderSchema.safeParse(parsed)
  if (!customProvider.success) {
    throw createFailure(
      400,
      "invalid_custom_provider_config",
      customProvider.error.issues[0]?.message ?? "Custom provider config is invalid.",
    )
  }

  // Everything except the model list becomes the stored provider config.
  const { models, ...providerConfig } = customProvider.data

  return {
    source: input.source,
    providerId: customProvider.data.id,
    name: input.name,
    providerConfig: providerConfig as JsonRecord,
    models: models.map((model) => ({
      id: model.id,
      name: model.name,
      config: model as JsonRecord,
    })),
    apiKey: input.apiKey?.trim() || null,
  }
}

/**
 * Loads every provider visible to the member (all of them for admins),
 * hydrated with its models, member/team access grants, and the specific
 * grants through which the current member can see it.
 */
async function loadLlmProviders(input: {
  organizationId: typeof LlmProviderTable.$inferSelect.organizationId
  currentMemberId: MemberId
  memberTeams: Array<{ id: TeamId }>
  isAdmin: boolean
}) {
  // Admins skip the grant lookup entirely; visibility is unconditional.
  const accessibleGrants = input.isAdmin
    ? []
    : await listAccessibleProviderAccess({
        organizationId: input.organizationId,
        currentMemberId: input.currentMemberId,
        memberTeams: input.memberTeams,
      })

  const accessibleProviderIds = [...new Set(accessibleGrants.map((grant) => grant.llmProviderId))]
  if (!input.isAdmin && accessibleProviderIds.length === 0) {
    return []
  }

  const providers = await db
    .select()
    .from(LlmProviderTable)
    .where(
      input.isAdmin
        ? eq(LlmProviderTable.organizationId, input.organizationId)
        : and(
            eq(LlmProviderTable.organizationId, input.organizationId),
            inArray(LlmProviderTable.id, accessibleProviderIds),
          ),
    )
    .orderBy(desc(LlmProviderTable.updatedAt))

  if (providers.length === 0) {
    return []
  }

  const providerIds = providers.map((provider) => provider.id)

  const models = await db
    .select()
    .from(LlmProviderModelTable)
    .where(inArray(LlmProviderModelTable.llmProviderId, providerIds))

  // Member-scoped grants, joined through to the member's auth user record.
  const memberAccessRows = await db
    .select({
      access: {
        id: LlmProviderAccessTable.id,
        llmProviderId: LlmProviderAccessTable.llmProviderId,
        createdAt: LlmProviderAccessTable.createdAt,
      },
      member: {
        id: MemberTable.id,
        role: MemberTable.role,
      },
      user: {
        id: AuthUserTable.id,
        name: AuthUserTable.name,
        email: AuthUserTable.email,
        image: AuthUserTable.image,
      },
    })
    .from(LlmProviderAccessTable)
    .innerJoin(MemberTable, eq(LlmProviderAccessTable.orgMembershipId, MemberTable.id))
    .innerJoin(AuthUserTable, eq(MemberTable.userId, AuthUserTable.id))
    .where(and(inArray(LlmProviderAccessTable.llmProviderId, providerIds), isNotNull(LlmProviderAccessTable.orgMembershipId)))

  // Team-scoped grants, joined to the team row.
  const teamAccessRows = await db
    .select({
      access: {
        id: LlmProviderAccessTable.id,
        llmProviderId: LlmProviderAccessTable.llmProviderId,
        createdAt: LlmProviderAccessTable.createdAt,
      },
      team: {
        id: TeamTable.id,
        name: TeamTable.name,
        createdAt: TeamTable.createdAt,
        updatedAt: TeamTable.updatedAt,
      },
    })
    .from(LlmProviderAccessTable)
    .innerJoin(TeamTable, eq(LlmProviderAccessTable.teamId, TeamTable.id))
    .where(and(inArray(LlmProviderAccessTable.llmProviderId, providerIds), isNotNull(LlmProviderAccessTable.teamId)))

  // Bucket each result set by provider id for O(1) hydration below.
  const modelsByProviderId = new Map<LlmProviderId, typeof models>()
  for (const model of models) {
    const bucket = modelsByProviderId.get(model.llmProviderId) ?? []
    bucket.push(model)
    modelsByProviderId.set(model.llmProviderId, bucket)
  }

  const memberAccessByProviderId = new Map<LlmProviderId, typeof memberAccessRows>()
  for (const entry of memberAccessRows) {
    const bucket = memberAccessByProviderId.get(entry.access.llmProviderId) ?? []
    bucket.push(entry)
    memberAccessByProviderId.set(entry.access.llmProviderId, bucket)
  }

  const teamAccessByProviderId = new Map<LlmProviderId, typeof teamAccessRows>()
  for (const entry of teamAccessRows) {
    const bucket = teamAccessByProviderId.get(entry.access.llmProviderId) ?? []
    bucket.push(entry)
    teamAccessByProviderId.set(entry.access.llmProviderId, bucket)
  }

  // Record which of the caller's own grants expose each provider.
  const accessibleViaByProviderId = new Map<LlmProviderId, { orgMembershipIds: MemberId[]; teamIds: TeamId[] }>()
  for (const grant of accessibleGrants) {
    const via = accessibleViaByProviderId.get(grant.llmProviderId) ?? { orgMembershipIds: [], teamIds: [] }
    if (grant.orgMembershipId && !via.orgMembershipIds.includes(grant.orgMembershipId)) {
      via.orgMembershipIds.push(grant.orgMembershipId)
    }
    if (grant.teamId && !via.teamIds.includes(grant.teamId)) {
      via.teamIds.push(grant.teamId)
    }
    accessibleViaByProviderId.set(grant.llmProviderId, via)
  }

  return providers.map((provider) => ({
    ...provider,
    hasApiKey: Boolean(provider.apiKey && provider.apiKey.trim().length > 0),
    models: (modelsByProviderId.get(provider.id) ?? [])
      .map((model) => ({
        id: model.modelId,
        name: model.name,
        config: model.modelConfig,
        createdAt: model.createdAt,
      }))
      .sort((left, right) => left.name.localeCompare(right.name)),
    access: {
      members: (memberAccessByProviderId.get(provider.id) ?? []).map((entry) => ({
        id: entry.access.id,
        orgMembershipId: entry.member.id,
        role: entry.member.role,
        user: entry.user,
        createdAt: entry.access.createdAt,
      })),
      teams: (teamAccessByProviderId.get(provider.id) ?? []).map((entry) => ({
        id: entry.access.id,
        teamId: entry.team.id,
        name: entry.team.name,
        createdAt: entry.team.createdAt,
        updatedAt: entry.team.updatedAt,
      })),
    },
    accessibleVia: accessibleViaByProviderId.get(provider.id) ??
{ orgMembershipIds: [], teamIds: [] }, + })) +} + +export function registerOrgLlmProviderRoutes }>(app: Hono) { + app.get( + "/v1/llm-provider-catalog", + describeRoute({ + tags: ["LLM Providers"], + summary: "List LLM provider catalog", + description: "Lists the provider catalog from models.dev so an organization can choose which LLM providers to configure.", + responses: { + 200: jsonResponse("Provider catalog returned successfully.", providerCatalogListResponseSchema), + 400: jsonResponse("The provider catalog path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to browse the provider catalog.", unauthorizedSchema), + 502: jsonResponse("The external provider catalog was unavailable.", providerCatalogUnavailableSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + async (c) => { + try { + const providers = await listModelsDevProviders() + return c.json({ providers }) + } catch (error) { + return c.json({ + error: "provider_catalog_unavailable", + message: error instanceof Error ? 
error.message : "Could not load the models.dev catalog.", + }, 502) + } + }, + ) + + app.get( + "/v1/llm-provider-catalog/:providerId", + describeRoute({ + tags: ["LLM Providers"], + summary: "Get LLM provider catalog entry", + description: "Returns the full models.dev catalog record for one provider, including its config template and model list.", + responses: { + 200: jsonResponse("Provider catalog entry returned successfully.", providerCatalogResponseSchema), + 400: jsonResponse("The provider catalog path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to inspect provider catalog entries.", unauthorizedSchema), + 404: jsonResponse("The requested provider catalog entry could not be found.", notFoundSchema), + 502: jsonResponse("The external provider catalog was unavailable.", providerCatalogUnavailableSchema), + }, + }), + requireUserMiddleware, + paramValidator(providerCatalogParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const params = c.req.valid("param") + + try { + const provider = await getModelsDevProvider(params.providerId) + if (!provider) { + return c.json({ error: "provider_not_found" }, 404) + } + + return c.json({ + provider: { + id: provider.id, + name: provider.name, + npm: provider.npm, + env: provider.env, + doc: provider.doc, + api: provider.api, + config: provider.config, + models: provider.models, + }, + }) + } catch (error) { + return c.json({ + error: "provider_catalog_unavailable", + message: error instanceof Error ? 
error.message : "Could not load the provider details.", + }, 502) + } + }, + ) + + app.get( + "/v1/llm-providers", + describeRoute({ + tags: ["LLM Providers"], + summary: "List organization LLM providers", + description: "Lists the LLM providers that the current organization member is allowed to see and potentially manage.", + responses: { + 200: jsonResponse("Accessible organization LLM providers returned successfully.", llmProviderListResponseSchema), + 400: jsonResponse("The provider list path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list organization LLM providers.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + resolveMemberTeamsMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const memberTeams = c.get("memberTeams") ?? [] + const providers = await loadLlmProviders({ + organizationId: payload.organization.id, + currentMemberId: payload.currentMember.id, + memberTeams, + isAdmin: isOrganizationAdmin(payload), + }) + + return c.json({ + llmProviders: providers.map((provider) => ({ + ...provider, + apiKey: undefined, + canManage: canManageLlmProvider(payload, provider), + })), + }) + }, + ) + + app.get( + "/v1/llm-providers/:llmProviderId/connect", + describeRoute({ + tags: ["LLM Providers"], + summary: "Get LLM provider connect payload", + description: "Returns one accessible organization LLM provider with the concrete model configuration needed to connect to it.", + responses: { + 200: jsonResponse("Provider connection payload returned successfully.", llmProviderResponseSchema), + 400: jsonResponse("The provider connect path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to connect to an organization LLM provider.", unauthorizedSchema), + 403: jsonResponse("Only members with access grants, the provider creator, or workspace admins can connect to this provider.", 
forbiddenSchema), + 404: jsonResponse("The provider could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgLlmProviderParamsSchema), + resolveOrganizationContextMiddleware, + resolveMemberTeamsMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const memberTeams = c.get("memberTeams") ?? [] + const params = c.req.valid("param") + + let llmProviderId: LlmProviderId + try { + llmProviderId = parseLlmProviderId(params.llmProviderId) + } catch { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + const providerRows = await db + .select() + .from(LlmProviderTable) + .where(and(eq(LlmProviderTable.id, llmProviderId), eq(LlmProviderTable.organizationId, payload.organization.id))) + .limit(1) + + const provider = providerRows[0] + if (!provider) { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + const accessible = await canAccessLlmProvider({ + organizationId: payload.organization.id, + llmProviderId, + currentMemberId: payload.currentMember.id, + memberTeams, + isAdmin: isOrganizationAdmin(payload), + }) + + if (!accessible) { + return c.json({ + error: "forbidden", + message: "You do not have access to this provider.", + }, 403) + } + + const models = await db + .select() + .from(LlmProviderModelTable) + .where(eq(LlmProviderModelTable.llmProviderId, llmProviderId)) + + return c.json({ + llmProvider: { + ...provider, + models: models + .map((model) => ({ + id: model.modelId, + name: model.name, + config: model.modelConfig, + createdAt: model.createdAt, + })) + .sort((left, right) => left.name.localeCompare(right.name)), + }, + }) + }, + ) + + app.post( + "/v1/llm-providers", + describeRoute({ + tags: ["LLM Providers"], + summary: "Create organization LLM provider", + description: "Creates a new organization-scoped LLM provider from either a models.dev provider template or a pasted custom configuration.", + responses: { + 201: jsonResponse("Organization LLM provider created 
successfully.", llmProviderResponseSchema), + 400: jsonResponse("The provider creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create organization LLM providers.", unauthorizedSchema), + 404: jsonResponse("A referenced provider, model, member, or team could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(llmProviderWriteSchema), + async (c) => { + const payload = c.get("organizationContext") + const input = c.req.valid("json") + + try { + const normalized = await normalizeLlmProviderInput(input) + const memberIds = await resolveMemberIds({ + organizationId: payload.organization.id, + values: input.memberIds, + }) + const teamIds = await resolveTeamIds({ + organizationId: payload.organization.id, + values: input.teamIds, + }) + + const llmProviderId = createDenTypeId("llmProvider") + const protectedMemberIds = [...new Set([payload.currentMember.id, ...memberIds])] + const now = new Date() + + await db.transaction(async (tx) => { + await tx.insert(LlmProviderTable).values({ + id: llmProviderId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + source: normalized.source, + providerId: normalized.providerId, + name: normalized.name, + providerConfig: normalized.providerConfig, + apiKey: normalized.apiKey, + createdAt: now, + updatedAt: now, + }) + + if (normalized.models.length > 0) { + await tx.insert(LlmProviderModelTable).values( + normalized.models.map((model) => ({ + id: createDenTypeId("llmProviderModel"), + llmProviderId, + modelId: model.id, + name: model.name, + modelConfig: model.config, + createdAt: now, + })), + ) + } + + const accessRows = [ + ...protectedMemberIds.map((orgMembershipId) => ({ + id: createDenTypeId("llmProviderAccess"), + llmProviderId, + orgMembershipId, + teamId: null, + createdAt: now, + })), + ...teamIds.map((teamId) => ({ + id: 
createDenTypeId("llmProviderAccess"), + llmProviderId, + orgMembershipId: null, + teamId, + createdAt: now, + })), + ] + + if (accessRows.length > 0) { + await tx.insert(LlmProviderAccessTable).values(accessRows) + } + }) + + return c.json({ + llmProvider: { + id: llmProviderId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + source: normalized.source, + providerId: normalized.providerId, + name: normalized.name, + providerConfig: normalized.providerConfig, + hasApiKey: Boolean(normalized.apiKey), + createdAt: now, + updatedAt: now, + }, + }, 201) + } catch (error) { + if (isRouteFailure(error)) { + return c.json( + { error: error.error, message: error.message }, + { status: error.status as 400 | 404 }, + ) + } + + throw error + } + }, + ) + + app.patch( + "/v1/llm-providers/:llmProviderId", + describeRoute({ + tags: ["LLM Providers"], + summary: "Update organization LLM provider", + description: "Updates an existing organization LLM provider, including its provider config, selected models, secret, and access grants.", + responses: { + 200: jsonResponse("Organization LLM provider updated successfully.", llmProviderResponseSchema), + 400: jsonResponse("The provider update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update organization LLM providers.", unauthorizedSchema), + 403: jsonResponse("Only the provider creator or a workspace admin can update providers.", forbiddenSchema), + 404: jsonResponse("The provider or a referenced resource could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgLlmProviderParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(llmProviderWriteSchema), + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let llmProviderId: LlmProviderId + try { + llmProviderId = 
parseLlmProviderId(params.llmProviderId) + } catch { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + const providerRows = await db + .select() + .from(LlmProviderTable) + .where(and(eq(LlmProviderTable.id, llmProviderId), eq(LlmProviderTable.organizationId, payload.organization.id))) + .limit(1) + + const provider = providerRows[0] + if (!provider) { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + if (!canManageLlmProvider(payload, provider)) { + return c.json({ + error: "forbidden", + message: "Only the provider creator or a workspace admin can update providers.", + }, 403) + } + + try { + const normalized = await normalizeLlmProviderInput(input) + const memberIds = await resolveMemberIds({ + organizationId: payload.organization.id, + values: input.memberIds, + }) + const teamIds = await resolveTeamIds({ + organizationId: payload.organization.id, + values: input.teamIds, + }) + const protectedMemberIds = [...new Set([provider.createdByOrgMembershipId, ...memberIds])] + const updatedAt = new Date() + + await db.transaction(async (tx) => { + await tx + .update(LlmProviderTable) + .set({ + source: normalized.source, + providerId: normalized.providerId, + name: normalized.name, + providerConfig: normalized.providerConfig, + apiKey: input.apiKey === undefined ? 
provider.apiKey : normalized.apiKey, + updatedAt, + }) + .where(eq(LlmProviderTable.id, provider.id)) + + await tx.delete(LlmProviderModelTable).where(eq(LlmProviderModelTable.llmProviderId, provider.id)) + await tx.delete(LlmProviderAccessTable).where(eq(LlmProviderAccessTable.llmProviderId, provider.id)) + + if (normalized.models.length > 0) { + await tx.insert(LlmProviderModelTable).values( + normalized.models.map((model) => ({ + id: createDenTypeId("llmProviderModel"), + llmProviderId: provider.id, + modelId: model.id, + name: model.name, + modelConfig: model.config, + createdAt: updatedAt, + })), + ) + } + + const accessRows = [ + ...protectedMemberIds.map((orgMembershipId) => ({ + id: createDenTypeId("llmProviderAccess"), + llmProviderId: provider.id, + orgMembershipId, + teamId: null, + createdAt: updatedAt, + })), + ...teamIds.map((teamId) => ({ + id: createDenTypeId("llmProviderAccess"), + llmProviderId: provider.id, + orgMembershipId: null, + teamId, + createdAt: updatedAt, + })), + ] + + if (accessRows.length > 0) { + await tx.insert(LlmProviderAccessTable).values(accessRows) + } + }) + + return c.json({ + llmProvider: { + ...provider, + source: normalized.source, + providerId: normalized.providerId, + name: normalized.name, + providerConfig: normalized.providerConfig, + hasApiKey: input.apiKey === undefined ? 
Boolean(provider.apiKey) : Boolean(normalized.apiKey), + updatedAt, + }, + }) + } catch (error) { + if (isRouteFailure(error)) { + return c.json( + { error: error.error, message: error.message }, + { status: error.status as 400 | 404 }, + ) + } + + throw error + } + }, + ) + + app.delete( + "/v1/llm-providers/:llmProviderId", + describeRoute({ + tags: ["LLM Providers"], + summary: "Delete organization LLM provider", + description: "Deletes an organization LLM provider and removes its models and access rules.", + responses: { + 204: emptyResponse("Organization LLM provider deleted successfully."), + 400: jsonResponse("The provider deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete organization LLM providers.", unauthorizedSchema), + 403: jsonResponse("Only the provider creator or a workspace admin can delete providers.", forbiddenSchema), + 404: jsonResponse("The provider could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgLlmProviderParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let llmProviderId: LlmProviderId + try { + llmProviderId = parseLlmProviderId(params.llmProviderId) + } catch { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + const providerRows = await db + .select() + .from(LlmProviderTable) + .where(and(eq(LlmProviderTable.id, llmProviderId), eq(LlmProviderTable.organizationId, payload.organization.id))) + .limit(1) + + const provider = providerRows[0] + if (!provider) { + return c.json({ error: "llm_provider_not_found" }, 404) + } + + if (!canManageLlmProvider(payload, provider)) { + return c.json({ + error: "forbidden", + message: "Only the provider creator or a workspace admin can delete providers.", + }, 403) + } + + await db.transaction(async (tx) => { + await 
tx.delete(LlmProviderAccessTable).where(eq(LlmProviderAccessTable.llmProviderId, provider.id)) + await tx.delete(LlmProviderModelTable).where(eq(LlmProviderModelTable.llmProviderId, provider.id)) + await tx.delete(LlmProviderTable).where(eq(LlmProviderTable.id, provider.id)) + }) + + return c.body(null, 204) + }, + ) + + app.delete( + "/v1/llm-providers/:llmProviderId/access/:accessId", + describeRoute({ + tags: ["LLM Providers"], + summary: "Remove LLM provider access grant", + description: "Removes one explicit member or team access grant from an organization LLM provider.", + responses: { + 204: emptyResponse("Organization LLM provider access removed successfully."), + 400: jsonResponse("The provider access deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage provider access.", unauthorizedSchema), + 403: jsonResponse("Only the provider creator or a workspace admin can manage provider access.", forbiddenSchema), + 404: jsonResponse("The provider or access grant could not be found.", notFoundSchema), + 409: jsonResponse("The request tried to remove a protected provider access entry.", conflictSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgLlmProviderParamsSchema.extend(idParamSchema("accessId", "llmProviderAccess").shape)), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let llmProviderId: LlmProviderId + let accessId: LlmProviderAccessId + try { + llmProviderId = parseLlmProviderId(params.llmProviderId) + accessId = parseLlmProviderAccessId(params.accessId) + } catch { + return c.json({ error: "not_found" }, 404) + } + + const providerRows = await db + .select() + .from(LlmProviderTable) + .where(and(eq(LlmProviderTable.id, llmProviderId), eq(LlmProviderTable.organizationId, payload.organization.id))) + .limit(1) + + const provider = providerRows[0] + if (!provider) { + return 
c.json({ error: "llm_provider_not_found" }, 404) + } + + if (!canManageLlmProvider(payload, provider)) { + return c.json({ error: "forbidden", message: "Only the provider creator or a workspace admin can manage access." }, 403) + } + + const accessRows = await db + .select() + .from(LlmProviderAccessTable) + .where(and(eq(LlmProviderAccessTable.id, accessId), eq(LlmProviderAccessTable.llmProviderId, provider.id))) + .limit(1) + + const access = accessRows[0] + if (!access) { + return c.json({ error: "llm_provider_access_not_found" }, 404) + } + + if (access.orgMembershipId === provider.createdByOrgMembershipId) { + return c.json({ + error: "protected_access", + message: "The provider creator always keeps direct access.", + }, 409) + } + + await db.delete(LlmProviderAccessTable).where(eq(LlmProviderAccessTable.id, access.id)) + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/members.ts b/ee/apps/den-api/src/routes/org/members.ts new file mode 100644 index 0000000000..17c9df490e --- /dev/null +++ b/ee/apps/den-api/src/routes/org/members.ts @@ -0,0 +1,137 @@ +import { and, eq } from "@openwork-ee/den-db/drizzle" +import { MemberTable } from "@openwork-ee/den-db/schema" +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { jsonValidator, paramValidator, requireUserMiddleware, resolveOrganizationContextMiddleware } from "../../middleware/index.js" +import { emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, successSchema, unauthorizedSchema } from "../../openapi.js" +import { listAssignableRoles, removeOrganizationMember, roleIncludesOwner } from "../../orgs.js" +import type { OrgRouteVariables } from "./shared.js" +import { ensureOwner, idParamSchema, normalizeRoleName } from "./shared.js" + +const updateMemberRoleSchema = z.object({ + role: 
z.string().trim().min(1).max(64), +}) + +type MemberId = typeof MemberTable.$inferSelect.id +const orgMemberParamsSchema = idParamSchema("memberId", "member") + +export function registerOrgMemberRoutes(app: Hono) { + app.post( + "/v1/members/:memberId/role", + describeRoute({ + tags: ["Members"], + summary: "Update member role", + description: "Changes the role assigned to a specific organization member.", + responses: { + 200: jsonResponse("Member role updated successfully.", successSchema), + 400: jsonResponse("The member role update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update member roles.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners can update member roles.", forbiddenSchema), + 404: jsonResponse("The member or organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgMemberParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(updateMemberRoleSchema), + async (c) => { + const permission = ensureOwner(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const input = c.req.valid("json") + + const params = c.req.valid("param") + let memberId: MemberId + try { + memberId = normalizeDenTypeId("member", params.memberId) + } catch { + return c.json({ error: "member_not_found" }, 404) + } + + const memberRows = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.id, memberId), eq(MemberTable.organizationId, payload.organization.id))) + .limit(1) + + const member = memberRows[0] + if (!member) { + return c.json({ error: "member_not_found" }, 404) + } + + if (roleIncludesOwner(member.role)) { + return c.json({ error: "owner_role_locked", message: "The organization owner role cannot be changed." 
}, 400) + } + + const role = normalizeRoleName(input.role) + const availableRoles = await listAssignableRoles(payload.organization.id) + if (!availableRoles.has(role)) { + return c.json({ error: "invalid_role", message: "Choose one of the existing organization roles." }, 400) + } + + await db.update(MemberTable).set({ role }).where(eq(MemberTable.id, member.id)) + return c.json({ success: true }) + }, + ) + + app.delete( + "/v1/members/:memberId", + describeRoute({ + tags: ["Members"], + summary: "Remove organization member", + description: "Removes a member from an organization while protecting the owner role from deletion.", + responses: { + 204: emptyResponse("Member removed successfully."), + 400: jsonResponse("The member removal request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to remove organization members.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners can remove members.", forbiddenSchema), + 404: jsonResponse("The member or organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgMemberParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const permission = ensureOwner(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const params = c.req.valid("param") + let memberId: MemberId + try { + memberId = normalizeDenTypeId("member", params.memberId) + } catch { + return c.json({ error: "member_not_found" }, 404) + } + + const memberRows = await db + .select() + .from(MemberTable) + .where(and(eq(MemberTable.id, memberId), eq(MemberTable.organizationId, payload.organization.id))) + .limit(1) + + const member = memberRows[0] + if (!member) { + return c.json({ error: "member_not_found" }, 404) + } + + if (roleIncludesOwner(member.role)) { + return c.json({ error: "owner_role_locked", message: "The organization owner cannot be removed." 
}, 400) + } + + await removeOrganizationMember({ + organizationId: payload.organization.id, + memberId: member.id, + }) + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/access.ts b/ee/apps/den-api/src/routes/org/plugin-system/access.ts new file mode 100644 index 0000000000..7dd6efd759 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/access.ts @@ -0,0 +1,290 @@ +import { and, eq, inArray, isNull } from "@openwork-ee/den-db/drizzle" +import { + ConfigObjectAccessGrantTable, + ConfigObjectTable, + ConnectorInstanceAccessGrantTable, + ConnectorInstanceTable, + MarketplaceAccessGrantTable, + MarketplacePluginTable, + MarketplaceTable, + PluginAccessGrantTable, + PluginConfigObjectTable, + PluginTable, +} from "@openwork-ee/den-db/schema" +import type { MemberTeamSummary, OrganizationContext } from "../../../orgs.js" +import { db } from "../../../db.js" +import { memberHasRole } from "../shared.js" + +export type PluginArchResourceKind = "config_object" | "connector_instance" | "marketplace" | "plugin" +export type PluginArchRole = "viewer" | "editor" | "manager" +export type PluginArchCapability = "config_object.create" | "connector_account.create" | "connector_instance.create" | "marketplace.create" | "plugin.create" + +export type PluginArchActorContext = { + memberTeams: MemberTeamSummary[] + organizationContext: OrganizationContext +} + +type MemberId = OrganizationContext["currentMember"]["id"] +type TeamId = MemberTeamSummary["id"] +type ConfigObjectId = typeof ConfigObjectTable.$inferSelect.id +type MarketplaceId = typeof MarketplaceTable.$inferSelect.id +type PluginId = typeof PluginTable.$inferSelect.id +type ConnectorInstanceId = typeof ConnectorInstanceTable.$inferSelect.id +type ConfigObjectGrantRow = Pick +type MarketplaceGrantRow = Pick +type PluginGrantRow = Pick +type ConnectorInstanceGrantRow = Pick +type GrantRow = ConfigObjectGrantRow | MarketplaceGrantRow | PluginGrantRow | 
ConnectorInstanceGrantRow + +type MarketplaceResourceLookupInput = { + context: PluginArchActorContext + resourceId: MarketplaceId + resourceKind: "marketplace" +} + +type PluginResourceLookupInput = { + context: PluginArchActorContext + resourceId: PluginId + resourceKind: "plugin" +} + +type ConnectorInstanceResourceLookupInput = { + context: PluginArchActorContext + resourceId: ConnectorInstanceId + resourceKind: "connector_instance" +} + +type ConfigObjectResourceLookupInput = { + context: PluginArchActorContext + resourceId: ConfigObjectId + resourceKind: "config_object" +} + +type ResourceLookupInput = + | PluginResourceLookupInput + | ConnectorInstanceResourceLookupInput + | MarketplaceResourceLookupInput + | ConfigObjectResourceLookupInput + +type RequireResourceRoleInput = ResourceLookupInput & { role: PluginArchRole } + +export class PluginArchAuthorizationError extends Error { + constructor( + readonly status: 403, + readonly error: "forbidden", + message: string, + ) { + super(message) + this.name = "PluginArchAuthorizationError" + } +} + +const rolePriority: Record = { + viewer: 1, + editor: 2, + manager: 3, +} + +function maxRole(current: PluginArchRole | null, candidate: PluginArchRole | null) { + if (!candidate) return current + if (!current) return candidate + return rolePriority[candidate] > rolePriority[current] ? 
candidate : current +} + +export function isPluginArchOrgAdmin(context: PluginArchActorContext) { + return context.organizationContext.currentMember.isOwner || memberHasRole(context.organizationContext.currentMember.role, "admin") +} + +export function hasPluginArchCapability(context: PluginArchActorContext, _capability: PluginArchCapability) { + return isPluginArchOrgAdmin(context) +} + +function roleSatisfies(role: PluginArchRole | null, required: PluginArchRole) { + if (!role) return false + return rolePriority[role] >= rolePriority[required] +} + +export function resolvePluginArchGrantRole(input: { + grants: GrantRow[] + memberId: MemberId + teamIds: TeamId[] +}) { + const teamIds = new Set(input.teamIds) + let resolved: PluginArchRole | null = null + + for (const grant of input.grants) { + if (grant.removedAt) continue + const applies = grant.orgWide || grant.orgMembershipId === input.memberId || (grant.teamId ? teamIds.has(grant.teamId) : false) + if (!applies) continue + resolved = maxRole(resolved, grant.role) + } + + return resolved +} + +async function resolveGrantRole(input: { + grants: GrantRow[] + context: PluginArchActorContext +}) { + return resolvePluginArchGrantRole({ + grants: input.grants, + memberId: input.context.organizationContext.currentMember.id, + teamIds: input.context.memberTeams.map((team) => team.id), + }) +} + +async function resolvePluginRoleForIds(context: PluginArchActorContext, pluginIds: PluginId[]) { + if (pluginIds.length === 0) { + return null + } + + if (isPluginArchOrgAdmin(context)) { + return "manager" satisfies PluginArchRole + } + + const grants = await db + .select({ + orgMembershipId: PluginAccessGrantTable.orgMembershipId, + orgWide: PluginAccessGrantTable.orgWide, + removedAt: PluginAccessGrantTable.removedAt, + role: PluginAccessGrantTable.role, + teamId: PluginAccessGrantTable.teamId, + }) + .from(PluginAccessGrantTable) + .where(inArray(PluginAccessGrantTable.pluginId, pluginIds)) + + return resolveGrantRole({ 
context, grants }) +} + +async function resolveMarketplaceRoleForIds(context: PluginArchActorContext, marketplaceIds: MarketplaceId[]) { + if (marketplaceIds.length === 0) { + return null + } + + if (isPluginArchOrgAdmin(context)) { + return "manager" satisfies PluginArchRole + } + + const grants = await db + .select({ + orgMembershipId: MarketplaceAccessGrantTable.orgMembershipId, + orgWide: MarketplaceAccessGrantTable.orgWide, + removedAt: MarketplaceAccessGrantTable.removedAt, + role: MarketplaceAccessGrantTable.role, + teamId: MarketplaceAccessGrantTable.teamId, + }) + .from(MarketplaceAccessGrantTable) + .where(inArray(MarketplaceAccessGrantTable.marketplaceId, marketplaceIds)) + + return resolveGrantRole({ context, grants }) +} + +export async function resolvePluginArchResourceRole(input: ResourceLookupInput) { + if (isPluginArchOrgAdmin(input.context)) { + return "manager" satisfies PluginArchRole + } + + if (input.resourceKind === "marketplace") { + const grants = await db + .select({ + orgMembershipId: MarketplaceAccessGrantTable.orgMembershipId, + orgWide: MarketplaceAccessGrantTable.orgWide, + removedAt: MarketplaceAccessGrantTable.removedAt, + role: MarketplaceAccessGrantTable.role, + teamId: MarketplaceAccessGrantTable.teamId, + }) + .from(MarketplaceAccessGrantTable) + .where(eq(MarketplaceAccessGrantTable.marketplaceId, input.resourceId)) + return resolveGrantRole({ context: input.context, grants }) + } + + if (input.resourceKind === "plugin") { + const grants = await db + .select({ + orgMembershipId: PluginAccessGrantTable.orgMembershipId, + orgWide: PluginAccessGrantTable.orgWide, + removedAt: PluginAccessGrantTable.removedAt, + role: PluginAccessGrantTable.role, + teamId: PluginAccessGrantTable.teamId, + }) + .from(PluginAccessGrantTable) + .where(eq(PluginAccessGrantTable.pluginId, input.resourceId)) + const resolved = await resolveGrantRole({ context: input.context, grants }) + if (resolved) { + return resolved + } + + const memberships = await 
db + .select({ marketplaceId: MarketplacePluginTable.marketplaceId }) + .from(MarketplacePluginTable) + .where(and(eq(MarketplacePluginTable.pluginId, input.resourceId), isNull(MarketplacePluginTable.removedAt))) + + const marketplaceRole = await resolveMarketplaceRoleForIds(input.context, memberships.map((membership) => membership.marketplaceId)) + return maxRole(resolved, marketplaceRole ? "viewer" : null) + } + + if (input.resourceKind === "connector_instance") { + const grants = await db + .select({ + orgMembershipId: ConnectorInstanceAccessGrantTable.orgMembershipId, + orgWide: ConnectorInstanceAccessGrantTable.orgWide, + removedAt: ConnectorInstanceAccessGrantTable.removedAt, + role: ConnectorInstanceAccessGrantTable.role, + teamId: ConnectorInstanceAccessGrantTable.teamId, + }) + .from(ConnectorInstanceAccessGrantTable) + .where(eq(ConnectorInstanceAccessGrantTable.connectorInstanceId, input.resourceId)) + return resolveGrantRole({ context: input.context, grants }) + } + + const directGrants = await db + .select({ + orgMembershipId: ConfigObjectAccessGrantTable.orgMembershipId, + orgWide: ConfigObjectAccessGrantTable.orgWide, + removedAt: ConfigObjectAccessGrantTable.removedAt, + role: ConfigObjectAccessGrantTable.role, + teamId: ConfigObjectAccessGrantTable.teamId, + }) + .from(ConfigObjectAccessGrantTable) + .where(eq(ConfigObjectAccessGrantTable.configObjectId, input.resourceId)) + + let resolved = await resolveGrantRole({ context: input.context, grants: directGrants }) + if (resolved) { + return resolved + } + + const memberships = await db + .select({ pluginId: PluginConfigObjectTable.pluginId }) + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.configObjectId, input.resourceId), isNull(PluginConfigObjectTable.removedAt))) + + const pluginRole = await resolvePluginRoleForIds(input.context, memberships.map((membership) => membership.pluginId)) + resolved = maxRole(resolved, pluginRole ? 
"viewer" : null) + return resolved +} + +export async function requirePluginArchCapability(context: PluginArchActorContext, capability: PluginArchCapability) { + if (hasPluginArchCapability(context, capability)) { + return + } + + throw new PluginArchAuthorizationError(403, "forbidden", `Missing organization capability: ${capability}`) +} + +export async function requirePluginArchResourceRole(input: { + context: PluginArchActorContext + resourceId: ConfigObjectId | ConnectorInstanceId | MarketplaceId | PluginId + resourceKind: PluginArchResourceKind + role: PluginArchRole +}) { + const resolved = await resolvePluginArchResourceRole(input as RequireResourceRoleInput) + if (roleSatisfies(resolved, input.role)) { + return resolved + } + + throw new PluginArchAuthorizationError( + 403, + "forbidden", + `Missing ${input.role} access for ${input.resourceKind.replace(/_/g, " ")}.`, + ) +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/connector-cleanup.ts b/ee/apps/den-api/src/routes/org/plugin-system/connector-cleanup.ts new file mode 100644 index 0000000000..3be0e77946 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/connector-cleanup.ts @@ -0,0 +1,58 @@ +export type ConnectorCleanupMarketplaceMembership = { + marketplaceId: TMarketplaceId + membershipSource: "api" | "connector" | "manual" | "system" + pluginId: TPluginId +} + +export function uniqueIds(values: TId[]) { + return [...new Set(values)] +} + +export function planConnectorImportedResourceCleanup(input: { + activeMarketplaceMemberships: Array> + activeMappingPluginIds: TPluginId[] + activePluginMembershipPluginIds: TPluginId[] + candidateMarketplaceIds: TMarketplaceId[] + candidatePluginIds: TPluginId[] +}) { + const candidateMarketplaceIds = uniqueIds(input.candidateMarketplaceIds) + const candidatePluginIds = uniqueIds(input.candidatePluginIds) + const candidateMarketplaceIdSet = new Set(candidateMarketplaceIds) + const activeMappingPluginIdSet = new 
Set(input.activeMappingPluginIds) + const activePluginMembershipPluginIdSet = new Set(input.activePluginMembershipPluginIds) + + const marketplaceMembershipsByPlugin = new Map>>() + const marketplaceMembershipsByMarketplace = new Map>>() + for (const membership of input.activeMarketplaceMemberships) { + const membershipsForPlugin = marketplaceMembershipsByPlugin.get(membership.pluginId) ?? [] + membershipsForPlugin.push(membership) + marketplaceMembershipsByPlugin.set(membership.pluginId, membershipsForPlugin) + + const membershipsForMarketplace = marketplaceMembershipsByMarketplace.get(membership.marketplaceId) ?? [] + membershipsForMarketplace.push(membership) + marketplaceMembershipsByMarketplace.set(membership.marketplaceId, membershipsForMarketplace) + } + + const pluginIdsToDelete = candidatePluginIds.filter((pluginId) => { + if (activeMappingPluginIdSet.has(pluginId) || activePluginMembershipPluginIdSet.has(pluginId)) { + return false + } + + const activeMarketplaceMemberships = marketplaceMembershipsByPlugin.get(pluginId) ?? [] + const hasNonConnectorDependency = activeMarketplaceMemberships.some((membership) => ( + !candidateMarketplaceIdSet.has(membership.marketplaceId) || membership.membershipSource !== "connector" + )) + return !hasNonConnectorDependency + }) + const pluginIdsToDeleteSet = new Set(pluginIdsToDelete) + + const marketplaceIdsToDelete = candidateMarketplaceIds.filter((marketplaceId) => { + const memberships = marketplaceMembershipsByMarketplace.get(marketplaceId) ?? 
[] + return memberships.every((membership) => pluginIdsToDeleteSet.has(membership.pluginId)) + }) + + return { + marketplaceIdsToDelete, + pluginIdsToDelete, + } +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/contracts.ts b/ee/apps/den-api/src/routes/org/plugin-system/contracts.ts new file mode 100644 index 0000000000..a3db498b7e --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/contracts.ts @@ -0,0 +1,1012 @@ +import type { z } from "zod" +import { + accessGrantListResponseSchema, + accessGrantMutationResponseSchema, + configObjectAccessGrantParamsSchema, + configObjectCreateSchema, + configObjectCreateVersionSchema, + configObjectDetailResponseSchema, + configObjectListQuerySchema, + configObjectListResponseSchema, + configObjectMutationResponseSchema, + configObjectParamsSchema, + configObjectPluginAttachSchema, + configObjectVersionDetailResponseSchema, + configObjectVersionListQuerySchema, + configObjectVersionListResponseSchema, + configObjectVersionParamsSchema, + connectorAccountCreateSchema, + connectorAccountDetailResponseSchema, + connectorAccountDisconnectSchema, + connectorAccountListQuerySchema, + connectorAccountListResponseSchema, + connectorAccountDisconnectResponseSchema, + connectorAccountMutationResponseSchema, + connectorInstanceAutoImportSchema, + connectorInstanceConfigurationResponseSchema, + connectorInstanceRemoveResponseSchema, + connectorAccountParamsSchema, + connectorAccountRepositoryParamsSchema, + connectorInstanceAccessGrantParamsSchema, + connectorInstanceCreateSchema, + connectorInstanceDetailResponseSchema, + connectorInstanceListQuerySchema, + connectorInstanceListResponseSchema, + connectorInstanceMutationResponseSchema, + connectorInstanceParamsSchema, + connectorInstanceUpdateSchema, + connectorMappingCreateSchema, + connectorMappingListQuerySchema, + connectorMappingListResponseSchema, + connectorMappingMutationResponseSchema, + connectorMappingParamsSchema, + connectorMappingUpdateSchema, + 
connectorSyncAsyncResponseSchema, + connectorSyncEventDetailResponseSchema, + connectorSyncEventListQuerySchema, + connectorSyncEventListResponseSchema, + connectorSyncEventParamsSchema, + connectorTargetCreateSchema, + connectorTargetDetailResponseSchema, + connectorTargetListQuerySchema, + connectorTargetListResponseSchema, + connectorTargetMutationResponseSchema, + connectorTargetParamsSchema, + connectorTargetUpdateSchema, + githubConnectorAccountCreateSchema, + githubConnectorDiscoveryResponseSchema, + githubDiscoveryApplyResponseSchema, + githubDiscoveryApplySchema, + githubDiscoveryTreeQuerySchema, + githubDiscoveryTreeResponseSchema, + githubInstallCompleteResponseSchema, + githubInstallCompleteSchema, + githubInstallStartResponseSchema, + githubInstallStartSchema, + githubConnectorSetupSchema, + githubRepositoryListQuerySchema, + githubRepositoryListResponseSchema, + githubSetupResponseSchema, + githubValidateTargetResponseSchema, + githubValidateTargetSchema, + githubWebhookAcceptedResponseSchema, + githubWebhookHeadersSchema, + githubWebhookIgnoredResponseSchema, + githubWebhookRawBodySchema, + githubWebhookUnauthorizedResponseSchema, + marketplaceAccessGrantParamsSchema, + marketplaceCreateSchema, + marketplaceDetailResponseSchema, + marketplaceListQuerySchema, + marketplaceListResponseSchema, + marketplaceMutationResponseSchema, + marketplaceParamsSchema, + marketplacePluginListResponseSchema, + marketplaceResolvedResponseSchema, + marketplacePluginMutationResponseSchema, + marketplacePluginParamsSchema, + marketplacePluginWriteSchema, + marketplaceUpdateSchema, + pluginAccessGrantParamsSchema, + pluginConfigObjectParamsSchema, + pluginCreateSchema, + pluginDetailResponseSchema, + pluginListQuerySchema, + pluginListResponseSchema, + pluginMembershipMutationResponseSchema, + pluginMembershipListResponseSchema, + pluginMembershipWriteSchema, + pluginMutationResponseSchema, + pluginParamsSchema, + pluginUpdateSchema, + resourceAccessGrantWriteSchema, +} 
from "./schemas.js" + +type EndpointMethod = "DELETE" | "GET" | "PATCH" | "POST" +type EndpointAudience = "admin" | "public_webhook" +type EndpointTag = "Config Objects" | "Plugins" | "Marketplaces" | "Connectors" | "GitHub" | "Webhooks" + +type EndpointContract = { + audience: EndpointAudience + description: string + method: EndpointMethod + path: string + request?: { + body?: z.ZodTypeAny + headers?: z.ZodTypeAny + params?: z.ZodTypeAny + query?: z.ZodTypeAny + } + response: { + description: string + schema?: z.ZodTypeAny + status: 200 | 201 | 202 | 204 | 401 + } + tag: EndpointTag +} + +type DeferredEndpointContract = { + description: string + method: EndpointMethod + path: string + reason: string + tag: EndpointTag +} + +const orgBasePath = "/v1" + +export const pluginArchRoutePaths = { + configObjects: `${orgBasePath}/config-objects`, + configObject: `${orgBasePath}/config-objects/:configObjectId`, + configObjectArchive: `${orgBasePath}/config-objects/:configObjectId/archive`, + configObjectDelete: `${orgBasePath}/config-objects/:configObjectId/delete`, + configObjectRestore: `${orgBasePath}/config-objects/:configObjectId/restore`, + configObjectPlugins: `${orgBasePath}/config-objects/:configObjectId/plugins`, + configObjectPlugin: `${orgBasePath}/config-objects/:configObjectId/plugins/:pluginId`, + configObjectAccess: `${orgBasePath}/config-objects/:configObjectId/access`, + configObjectAccessGrant: `${orgBasePath}/config-objects/:configObjectId/access/:grantId`, + configObjectVersions: `${orgBasePath}/config-objects/:configObjectId/versions`, + configObjectVersion: `${orgBasePath}/config-objects/:configObjectId/versions/:versionId`, + configObjectLatestVersion: `${orgBasePath}/config-objects/:configObjectId/versions/latest`, + configObjectCompareVersions: `${orgBasePath}/config-objects/:configObjectId/versions/compare`, + skills: `${orgBasePath}/skills`, + agents: `${orgBasePath}/agents`, + commands: `${orgBasePath}/commands`, + tools: 
`${orgBasePath}/tools`, + mcps: `${orgBasePath}/mcps`, + plugins: `${orgBasePath}/plugins`, + plugin: `${orgBasePath}/plugins/:pluginId`, + pluginArchive: `${orgBasePath}/plugins/:pluginId/archive`, + pluginRestore: `${orgBasePath}/plugins/:pluginId/restore`, + pluginConfigObjects: `${orgBasePath}/plugins/:pluginId/config-objects`, + pluginConfigObject: `${orgBasePath}/plugins/:pluginId/config-objects/:configObjectId`, + pluginResolved: `${orgBasePath}/plugins/:pluginId/resolved`, + pluginReleases: `${orgBasePath}/plugins/:pluginId/releases`, + pluginAccess: `${orgBasePath}/plugins/:pluginId/access`, + pluginAccessGrant: `${orgBasePath}/plugins/:pluginId/access/:grantId`, + marketplaces: `${orgBasePath}/marketplaces`, + marketplace: `${orgBasePath}/marketplaces/:marketplaceId`, + marketplaceResolved: `${orgBasePath}/marketplaces/:marketplaceId/resolved`, + marketplaceArchive: `${orgBasePath}/marketplaces/:marketplaceId/archive`, + marketplaceRestore: `${orgBasePath}/marketplaces/:marketplaceId/restore`, + marketplacePlugins: `${orgBasePath}/marketplaces/:marketplaceId/plugins`, + marketplacePlugin: `${orgBasePath}/marketplaces/:marketplaceId/plugins/:pluginId`, + marketplaceAccess: `${orgBasePath}/marketplaces/:marketplaceId/access`, + marketplaceAccessGrant: `${orgBasePath}/marketplaces/:marketplaceId/access/:grantId`, + connectorAccounts: `${orgBasePath}/connector-accounts`, + connectorAccount: `${orgBasePath}/connector-accounts/:connectorAccountId`, + connectorAccountDisconnect: `${orgBasePath}/connector-accounts/:connectorAccountId/disconnect`, + connectorInstances: `${orgBasePath}/connector-instances`, + connectorInstance: `${orgBasePath}/connector-instances/:connectorInstanceId`, + connectorInstanceConfiguration: `${orgBasePath}/connector-instances/:connectorInstanceId/configuration`, + connectorInstanceAutoImport: `${orgBasePath}/connector-instances/:connectorInstanceId/auto-import`, + connectorInstanceRemove: 
`${orgBasePath}/connector-instances/:connectorInstanceId/remove`, + connectorInstanceArchive: `${orgBasePath}/connector-instances/:connectorInstanceId/archive`, + connectorInstanceDisable: `${orgBasePath}/connector-instances/:connectorInstanceId/disable`, + connectorInstanceEnable: `${orgBasePath}/connector-instances/:connectorInstanceId/enable`, + connectorInstanceAccess: `${orgBasePath}/connector-instances/:connectorInstanceId/access`, + connectorInstanceAccessGrant: `${orgBasePath}/connector-instances/:connectorInstanceId/access/:grantId`, + connectorTargets: `${orgBasePath}/connector-instances/:connectorInstanceId/targets`, + connectorTarget: `${orgBasePath}/connector-targets/:connectorTargetId`, + connectorTargetResync: `${orgBasePath}/connector-targets/:connectorTargetId/resync`, + connectorTargetMappings: `${orgBasePath}/connector-targets/:connectorTargetId/mappings`, + connectorMapping: `${orgBasePath}/connector-mappings/:connectorMappingId`, + connectorMappingPreview: `${orgBasePath}/connector-mappings/:connectorMappingId/preview`, + connectorSyncEvents: `${orgBasePath}/connector-sync-events`, + connectorSyncEvent: `${orgBasePath}/connector-sync-events/:connectorSyncEventId`, + connectorSyncEventRetry: `${orgBasePath}/connector-sync-events/:connectorSyncEventId/retry`, + connectorInstanceDiscovery: `${orgBasePath}/connector-instances/:connectorInstanceId/discovery`, + connectorInstanceDiscoveryApply: `${orgBasePath}/connector-instances/:connectorInstanceId/discovery/apply`, + connectorInstanceDiscoveryTree: `${orgBasePath}/connector-instances/:connectorInstanceId/discovery/tree`, + githubInstallStart: `${orgBasePath}/connectors/github/install/start`, + githubInstallComplete: `${orgBasePath}/connectors/github/install/complete`, + githubSetup: `${orgBasePath}/connectors/github/setup`, + githubAccounts: `${orgBasePath}/connectors/github/accounts`, + githubAccountRepositories: `${orgBasePath}/connectors/github/accounts/:connectorAccountId/repositories`, + 
githubValidateTarget: `${orgBasePath}/connectors/github/validate-target`, + githubWebhookIngress: "/v1/webhooks/connectors/github", +} as const + +export const pluginArchEndpointContracts: Record<string, EndpointContract> = { + listConfigObjects: { + audience: "admin", + description: "List current config object projections with search and connector filters.", + method: "GET", + path: pluginArchRoutePaths.configObjects, + request: { query: configObjectListQuerySchema }, + response: { description: "Current config object rows.", schema: configObjectListResponseSchema, status: 200 }, + tag: "Config Objects", + }, + getConfigObject: { + audience: "admin", + description: "Get one config object with its latest version projection.", + method: "GET", + path: pluginArchRoutePaths.configObject, + request: { params: configObjectParamsSchema }, + response: { description: "Current config object detail.", schema: configObjectDetailResponseSchema, status: 200 }, + tag: "Config Objects", + }, + createConfigObject: { + audience: "admin", + description: "Create a cloud or imported config object and optionally attach it to plugins.", + method: "POST", + path: pluginArchRoutePaths.configObjects, + request: { body: configObjectCreateSchema }, + response: { description: "Config object created successfully.", schema: configObjectMutationResponseSchema, status: 201 }, + tag: "Config Objects", + }, + createConfigObjectVersion: { + audience: "admin", + description: "Create a new immutable version for an existing config object.", + method: "POST", + path: pluginArchRoutePaths.configObjectVersions, + request: { body: configObjectCreateVersionSchema, params: configObjectParamsSchema }, + response: { description: "Latest config object detail after version creation.", schema: configObjectMutationResponseSchema, status: 201 }, + tag: "Config Objects", + }, + archiveConfigObject: { + audience: "admin", + description: "Archive a config object without removing history.", + method: "POST", + path: 
pluginArchRoutePaths.configObjectArchive, + request: { params: configObjectParamsSchema }, + response: { description: "Archived config object detail.", schema: configObjectMutationResponseSchema, status: 200 }, + tag: "Config Objects", + }, + deleteConfigObject: { + audience: "admin", + description: "Soft-delete a config object while preserving history.", + method: "POST", + path: pluginArchRoutePaths.configObjectDelete, + request: { params: configObjectParamsSchema }, + response: { description: "Deleted config object detail.", schema: configObjectMutationResponseSchema, status: 200 }, + tag: "Config Objects", + }, + restoreConfigObject: { + audience: "admin", + description: "Restore a deleted or archived config object.", + method: "POST", + path: pluginArchRoutePaths.configObjectRestore, + request: { params: configObjectParamsSchema }, + response: { description: "Restored config object detail.", schema: configObjectMutationResponseSchema, status: 200 }, + tag: "Config Objects", + }, + listConfigObjectPlugins: { + audience: "admin", + description: "List the plugins that currently include a config object.", + method: "GET", + path: pluginArchRoutePaths.configObjectPlugins, + request: { params: configObjectParamsSchema }, + response: { description: "Plugin memberships for the config object.", schema: pluginMembershipListResponseSchema, status: 200 }, + tag: "Config Objects", + }, + attachConfigObjectToPlugin: { + audience: "admin", + description: "Attach a config object to a plugin using plugin-scoped write access.", + method: "POST", + path: pluginArchRoutePaths.configObjectPlugins, + request: { body: configObjectPluginAttachSchema, params: configObjectParamsSchema }, + response: { description: "Plugin membership created successfully.", schema: pluginMembershipMutationResponseSchema, status: 201 }, + tag: "Config Objects", + }, + removeConfigObjectFromPlugin: { + audience: "admin", + description: "Remove one active plugin membership from a config object.", + method: 
"DELETE", + path: pluginArchRoutePaths.configObjectPlugin, + request: { params: configObjectParamsSchema.extend({ pluginId: pluginParamsSchema.shape.pluginId }) }, + response: { description: "Plugin membership removed successfully.", status: 204 }, + tag: "Config Objects", + }, + listConfigObjectAccess: { + audience: "admin", + description: "List direct, team, and org-wide grants for a config object.", + method: "GET", + path: pluginArchRoutePaths.configObjectAccess, + request: { params: configObjectParamsSchema }, + response: { description: "Config object access grants.", schema: accessGrantListResponseSchema, status: 200 }, + tag: "Config Objects", + }, + grantConfigObjectAccess: { + audience: "admin", + description: "Create one direct, team, or org-wide access grant for a config object.", + method: "POST", + path: pluginArchRoutePaths.configObjectAccess, + request: { body: resourceAccessGrantWriteSchema, params: configObjectParamsSchema }, + response: { description: "Config object access grant created successfully.", schema: accessGrantMutationResponseSchema, status: 201 }, + tag: "Config Objects", + }, + revokeConfigObjectAccess: { + audience: "admin", + description: "Soft-revoke one config object access grant.", + method: "DELETE", + path: pluginArchRoutePaths.configObjectAccessGrant, + request: { params: configObjectAccessGrantParamsSchema }, + response: { description: "Config object access grant revoked successfully.", status: 204 }, + tag: "Config Objects", + }, + listConfigObjectVersions: { + audience: "admin", + description: "List immutable versions for a config object.", + method: "GET", + path: pluginArchRoutePaths.configObjectVersions, + request: { params: configObjectParamsSchema, query: configObjectVersionListQuerySchema }, + response: { description: "Config object versions.", schema: configObjectVersionListResponseSchema, status: 200 }, + tag: "Config Objects", + }, + getConfigObjectVersion: { + audience: "admin", + description: "Get one immutable 
config object version.", + method: "GET", + path: pluginArchRoutePaths.configObjectVersion, + request: { params: configObjectVersionParamsSchema }, + response: { description: "Config object version detail.", schema: configObjectVersionDetailResponseSchema, status: 200 }, + tag: "Config Objects", + }, + getLatestConfigObjectVersion: { + audience: "admin", + description: "Resolve the latest config object version using created_at and id ordering.", + method: "GET", + path: pluginArchRoutePaths.configObjectLatestVersion, + request: { params: configObjectParamsSchema }, + response: { description: "Latest config object version detail.", schema: configObjectVersionDetailResponseSchema, status: 200 }, + tag: "Config Objects", + }, + listPlugins: { + audience: "admin", + description: "List accessible plugins for the organization.", + method: "GET", + path: pluginArchRoutePaths.plugins, + request: { query: pluginListQuerySchema }, + response: { description: "Plugin list.", schema: pluginListResponseSchema, status: 200 }, + tag: "Plugins", + }, + getPlugin: { + audience: "admin", + description: "Get one plugin and its current metadata.", + method: "GET", + path: pluginArchRoutePaths.plugin, + request: { params: pluginParamsSchema }, + response: { description: "Plugin detail.", schema: pluginDetailResponseSchema, status: 200 }, + tag: "Plugins", + }, + createPlugin: { + audience: "admin", + description: "Create a private-by-default plugin.", + method: "POST", + path: pluginArchRoutePaths.plugins, + request: { body: pluginCreateSchema }, + response: { description: "Plugin created successfully.", schema: pluginMutationResponseSchema, status: 201 }, + tag: "Plugins", + }, + updatePlugin: { + audience: "admin", + description: "Patch plugin metadata.", + method: "PATCH", + path: pluginArchRoutePaths.plugin, + request: { body: pluginUpdateSchema, params: pluginParamsSchema }, + response: { description: "Plugin updated successfully.", schema: pluginMutationResponseSchema, status: 200 
}, + tag: "Plugins", + }, + archivePlugin: { + audience: "admin", + description: "Archive a plugin without deleting membership history.", + method: "POST", + path: pluginArchRoutePaths.pluginArchive, + request: { params: pluginParamsSchema }, + response: { description: "Archived plugin detail.", schema: pluginMutationResponseSchema, status: 200 }, + tag: "Plugins", + }, + restorePlugin: { + audience: "admin", + description: "Restore an archived or deleted plugin.", + method: "POST", + path: pluginArchRoutePaths.pluginRestore, + request: { params: pluginParamsSchema }, + response: { description: "Restored plugin detail.", schema: pluginMutationResponseSchema, status: 200 }, + tag: "Plugins", + }, + listPluginConfigObjects: { + audience: "admin", + description: "List plugin memberships and the current config object projections they reference.", + method: "GET", + path: pluginArchRoutePaths.pluginConfigObjects, + request: { params: pluginParamsSchema }, + response: { description: "Plugin memberships.", schema: pluginMembershipListResponseSchema, status: 200 }, + tag: "Plugins", + }, + addPluginConfigObject: { + audience: "admin", + description: "Add a config object to a plugin using plugin-scoped write access.", + method: "POST", + path: pluginArchRoutePaths.pluginConfigObjects, + request: { body: pluginMembershipWriteSchema, params: pluginParamsSchema }, + response: { description: "Plugin membership created successfully.", schema: pluginMembershipMutationResponseSchema, status: 201 }, + tag: "Plugins", + }, + removePluginConfigObject: { + audience: "admin", + description: "Remove one config object membership from a plugin.", + method: "DELETE", + path: pluginArchRoutePaths.pluginConfigObject, + request: { params: pluginConfigObjectParamsSchema }, + response: { description: "Plugin membership removed successfully.", status: 204 }, + tag: "Plugins", + }, + getResolvedPlugin: { + audience: "admin", + description: "Preview the resolved latest-version members of a plugin 
without invoking delivery.", + method: "GET", + path: pluginArchRoutePaths.pluginResolved, + request: { params: pluginParamsSchema }, + response: { description: "Resolved plugin membership view.", schema: pluginMembershipListResponseSchema, status: 200 }, + tag: "Plugins", + }, + listPluginAccess: { + audience: "admin", + description: "List direct, team, and org-wide grants for a plugin.", + method: "GET", + path: pluginArchRoutePaths.pluginAccess, + request: { params: pluginParamsSchema }, + response: { description: "Plugin access grants.", schema: accessGrantListResponseSchema, status: 200 }, + tag: "Plugins", + }, + grantPluginAccess: { + audience: "admin", + description: "Create one direct, team, or org-wide access grant for a plugin.", + method: "POST", + path: pluginArchRoutePaths.pluginAccess, + request: { body: resourceAccessGrantWriteSchema, params: pluginParamsSchema }, + response: { description: "Plugin access grant created successfully.", schema: accessGrantMutationResponseSchema, status: 201 }, + tag: "Plugins", + }, + revokePluginAccess: { + audience: "admin", + description: "Soft-revoke one plugin access grant.", + method: "DELETE", + path: pluginArchRoutePaths.pluginAccessGrant, + request: { params: pluginAccessGrantParamsSchema }, + response: { description: "Plugin access grant revoked successfully.", status: 204 }, + tag: "Plugins", + }, + listMarketplaces: { + audience: "admin", + description: "List accessible marketplaces for the organization.", + method: "GET", + path: pluginArchRoutePaths.marketplaces, + request: { query: marketplaceListQuerySchema }, + response: { description: "Marketplace list.", schema: marketplaceListResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + getMarketplace: { + audience: "admin", + description: "Get one marketplace and its current metadata.", + method: "GET", + path: pluginArchRoutePaths.marketplace, + request: { params: marketplaceParamsSchema }, + response: { description: "Marketplace detail.", schema: 
marketplaceDetailResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + createMarketplace: { + audience: "admin", + description: "Create a private-by-default marketplace.", + method: "POST", + path: pluginArchRoutePaths.marketplaces, + request: { body: marketplaceCreateSchema }, + response: { description: "Marketplace created successfully.", schema: marketplaceMutationResponseSchema, status: 201 }, + tag: "Marketplaces", + }, + updateMarketplace: { + audience: "admin", + description: "Patch marketplace metadata.", + method: "PATCH", + path: pluginArchRoutePaths.marketplace, + request: { body: marketplaceUpdateSchema, params: marketplaceParamsSchema }, + response: { description: "Marketplace updated successfully.", schema: marketplaceMutationResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + archiveMarketplace: { + audience: "admin", + description: "Archive a marketplace without deleting membership history.", + method: "POST", + path: pluginArchRoutePaths.marketplaceArchive, + request: { params: marketplaceParamsSchema }, + response: { description: "Archived marketplace detail.", schema: marketplaceMutationResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + restoreMarketplace: { + audience: "admin", + description: "Restore an archived or deleted marketplace.", + method: "POST", + path: pluginArchRoutePaths.marketplaceRestore, + request: { params: marketplaceParamsSchema }, + response: { description: "Restored marketplace detail.", schema: marketplaceMutationResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + listMarketplacePlugins: { + audience: "admin", + description: "List marketplace memberships and the plugins they reference.", + method: "GET", + path: pluginArchRoutePaths.marketplacePlugins, + request: { params: marketplaceParamsSchema }, + response: { description: "Marketplace plugin memberships.", schema: marketplacePluginListResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + getMarketplaceResolved: { + audience: 
"admin", + description: "Return marketplace detail with plugins and derived source info.", + method: "GET", + path: pluginArchRoutePaths.marketplaceResolved, + request: { params: marketplaceParamsSchema }, + response: { description: "Marketplace resolved detail.", schema: marketplaceResolvedResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + addMarketplacePlugin: { + audience: "admin", + description: "Add a plugin to a marketplace using marketplace-scoped write access.", + method: "POST", + path: pluginArchRoutePaths.marketplacePlugins, + request: { body: marketplacePluginWriteSchema, params: marketplaceParamsSchema }, + response: { description: "Marketplace plugin membership created successfully.", schema: marketplacePluginMutationResponseSchema, status: 201 }, + tag: "Marketplaces", + }, + removeMarketplacePlugin: { + audience: "admin", + description: "Remove one plugin membership from a marketplace.", + method: "DELETE", + path: pluginArchRoutePaths.marketplacePlugin, + request: { params: marketplacePluginParamsSchema }, + response: { description: "Marketplace plugin membership removed successfully.", status: 204 }, + tag: "Marketplaces", + }, + listMarketplaceAccess: { + audience: "admin", + description: "List direct, team, and org-wide grants for a marketplace.", + method: "GET", + path: pluginArchRoutePaths.marketplaceAccess, + request: { params: marketplaceParamsSchema }, + response: { description: "Marketplace access grants.", schema: accessGrantListResponseSchema, status: 200 }, + tag: "Marketplaces", + }, + grantMarketplaceAccess: { + audience: "admin", + description: "Create one direct, team, or org-wide access grant for a marketplace.", + method: "POST", + path: pluginArchRoutePaths.marketplaceAccess, + request: { body: resourceAccessGrantWriteSchema, params: marketplaceParamsSchema }, + response: { description: "Marketplace access grant created successfully.", schema: accessGrantMutationResponseSchema, status: 201 }, + tag: "Marketplaces", + 
}, + revokeMarketplaceAccess: { + audience: "admin", + description: "Soft-revoke one marketplace access grant.", + method: "DELETE", + path: pluginArchRoutePaths.marketplaceAccessGrant, + request: { params: marketplaceAccessGrantParamsSchema }, + response: { description: "Marketplace access grant revoked successfully.", status: 204 }, + tag: "Marketplaces", + }, + listConnectorAccounts: { + audience: "admin", + description: "List connector accounts such as GitHub App installations available to the org.", + method: "GET", + path: pluginArchRoutePaths.connectorAccounts, + request: { query: connectorAccountListQuerySchema }, + response: { description: "Connector account list.", schema: connectorAccountListResponseSchema, status: 200 }, + tag: "Connectors", + }, + createConnectorAccount: { + audience: "admin", + description: "Create a reusable connector account record.", + method: "POST", + path: pluginArchRoutePaths.connectorAccounts, + request: { body: connectorAccountCreateSchema }, + response: { description: "Connector account created successfully.", schema: connectorAccountMutationResponseSchema, status: 201 }, + tag: "Connectors", + }, + getConnectorAccount: { + audience: "admin", + description: "Get one connector account record.", + method: "GET", + path: pluginArchRoutePaths.connectorAccount, + request: { params: connectorAccountParamsSchema }, + response: { description: "Connector account detail.", schema: connectorAccountDetailResponseSchema, status: 200 }, + tag: "Connectors", + }, + disconnectConnectorAccount: { + audience: "admin", + description: "Disconnect one connector account and delete its connector-managed imports (mappings, config objects, plugins created via discovery, and empty marketplaces).", + method: "POST", + path: pluginArchRoutePaths.connectorAccountDisconnect, + request: { body: connectorAccountDisconnectSchema, params: connectorAccountParamsSchema }, + response: { description: "Connector account disconnected and cleaned up successfully.", 
schema: connectorAccountDisconnectResponseSchema, status: 200 }, + tag: "Connectors", + }, + listConnectorInstances: { + audience: "admin", + description: "List configured connector instances for the org.", + method: "GET", + path: pluginArchRoutePaths.connectorInstances, + request: { query: connectorInstanceListQuerySchema }, + response: { description: "Connector instance list.", schema: connectorInstanceListResponseSchema, status: 200 }, + tag: "Connectors", + }, + createConnectorInstance: { + audience: "admin", + description: "Create a connector instance backed by one connector account.", + method: "POST", + path: pluginArchRoutePaths.connectorInstances, + request: { body: connectorInstanceCreateSchema }, + response: { description: "Connector instance created successfully.", schema: connectorInstanceMutationResponseSchema, status: 201 }, + tag: "Connectors", + }, + getConnectorInstance: { + audience: "admin", + description: "Get one connector instance.", + method: "GET", + path: pluginArchRoutePaths.connectorInstance, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance detail.", schema: connectorInstanceDetailResponseSchema, status: 200 }, + tag: "Connectors", + }, + updateConnectorInstance: { + audience: "admin", + description: "Patch connector instance metadata or config.", + method: "PATCH", + path: pluginArchRoutePaths.connectorInstance, + request: { body: connectorInstanceUpdateSchema, params: connectorInstanceParamsSchema }, + response: { description: "Connector instance updated successfully.", schema: connectorInstanceMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + archiveConnectorInstance: { + audience: "admin", + description: "Archive a connector instance.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceArchive, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance archived successfully.", schema: 
connectorInstanceMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + getConnectorInstanceConfiguration: { + audience: "admin", + description: "Return the configured plugins, mappings, and import state for a connector instance.", + method: "GET", + path: pluginArchRoutePaths.connectorInstanceConfiguration, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance configuration returned successfully.", schema: connectorInstanceConfigurationResponseSchema, status: 200 }, + tag: "Connectors", + }, + setConnectorInstanceAutoImport: { + audience: "admin", + description: "Enable or disable auto-import of new plugins for a connector instance.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceAutoImport, + request: { body: connectorInstanceAutoImportSchema, params: connectorInstanceParamsSchema }, + response: { description: "Connector instance auto-import updated successfully.", schema: connectorInstanceConfigurationResponseSchema, status: 200 }, + tag: "Connectors", + }, + removeConnectorInstance: { + audience: "admin", + description: "Remove a connector instance and delete its associated plugins, mappings, config objects, and bindings.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceRemove, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance removed and cleaned up successfully.", schema: connectorInstanceRemoveResponseSchema, status: 200 }, + tag: "Connectors", + }, + disableConnectorInstance: { + audience: "admin", + description: "Disable sync execution for a connector instance.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceDisable, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance disabled successfully.", schema: connectorInstanceMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + enableConnectorInstance: { + audience: "admin", + description: 
"Re-enable sync execution for a connector instance.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceEnable, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance enabled successfully.", schema: connectorInstanceMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + listConnectorInstanceAccess: { + audience: "admin", + description: "List direct, team, and org-wide grants for a connector instance.", + method: "GET", + path: pluginArchRoutePaths.connectorInstanceAccess, + request: { params: connectorInstanceParamsSchema }, + response: { description: "Connector instance access grants.", schema: accessGrantListResponseSchema, status: 200 }, + tag: "Connectors", + }, + grantConnectorInstanceAccess: { + audience: "admin", + description: "Create one direct, team, or org-wide access grant for a connector instance.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceAccess, + request: { body: resourceAccessGrantWriteSchema, params: connectorInstanceParamsSchema }, + response: { description: "Connector instance access grant created successfully.", schema: accessGrantMutationResponseSchema, status: 201 }, + tag: "Connectors", + }, + revokeConnectorInstanceAccess: { + audience: "admin", + description: "Soft-revoke one connector instance access grant.", + method: "DELETE", + path: pluginArchRoutePaths.connectorInstanceAccessGrant, + request: { params: connectorInstanceAccessGrantParamsSchema }, + response: { description: "Connector instance access grant revoked successfully.", status: 204 }, + tag: "Connectors", + }, + listConnectorTargets: { + audience: "admin", + description: "List external targets configured under a connector instance.", + method: "GET", + path: pluginArchRoutePaths.connectorTargets, + request: { params: connectorInstanceParamsSchema, query: connectorTargetListQuerySchema }, + response: { description: "Connector target list.", schema: connectorTargetListResponseSchema, 
status: 200 }, + tag: "Connectors", + }, + createConnectorTarget: { + audience: "admin", + description: "Create one connector target such as a GitHub repository branch.", + method: "POST", + path: pluginArchRoutePaths.connectorTargets, + request: { body: connectorTargetCreateSchema, params: connectorInstanceParamsSchema }, + response: { description: "Connector target created successfully.", schema: connectorTargetMutationResponseSchema, status: 201 }, + tag: "Connectors", + }, + getConnectorTarget: { + audience: "admin", + description: "Get one connector target.", + method: "GET", + path: pluginArchRoutePaths.connectorTarget, + request: { params: connectorTargetParamsSchema }, + response: { description: "Connector target detail.", schema: connectorTargetDetailResponseSchema, status: 200 }, + tag: "Connectors", + }, + updateConnectorTarget: { + audience: "admin", + description: "Patch one connector target.", + method: "PATCH", + path: pluginArchRoutePaths.connectorTarget, + request: { body: connectorTargetUpdateSchema, params: connectorTargetParamsSchema }, + response: { description: "Connector target updated successfully.", schema: connectorTargetMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + resyncConnectorTarget: { + audience: "admin", + description: "Queue a manual reconciliation run for one connector target.", + method: "POST", + path: pluginArchRoutePaths.connectorTargetResync, + request: { params: connectorTargetParamsSchema }, + response: { description: "Connector target resync queued successfully.", schema: connectorSyncAsyncResponseSchema, status: 202 }, + tag: "Connectors", + }, + listConnectorMappings: { + audience: "admin", + description: "List mappings configured under a connector target.", + method: "GET", + path: pluginArchRoutePaths.connectorTargetMappings, + request: { params: connectorTargetParamsSchema, query: connectorMappingListQuerySchema }, + response: { description: "Connector mapping list.", schema: 
connectorMappingListResponseSchema, status: 200 }, + tag: "Connectors", + }, + createConnectorMapping: { + audience: "admin", + description: "Create a path or API mapping for a connector target.", + method: "POST", + path: pluginArchRoutePaths.connectorTargetMappings, + request: { body: connectorMappingCreateSchema, params: connectorTargetParamsSchema }, + response: { description: "Connector mapping created successfully.", schema: connectorMappingMutationResponseSchema, status: 201 }, + tag: "Connectors", + }, + updateConnectorMapping: { + audience: "admin", + description: "Patch one connector mapping.", + method: "PATCH", + path: pluginArchRoutePaths.connectorMapping, + request: { body: connectorMappingUpdateSchema, params: connectorMappingParamsSchema }, + response: { description: "Connector mapping updated successfully.", schema: connectorMappingMutationResponseSchema, status: 200 }, + tag: "Connectors", + }, + deleteConnectorMapping: { + audience: "admin", + description: "Delete one connector mapping.", + method: "DELETE", + path: pluginArchRoutePaths.connectorMapping, + request: { params: connectorMappingParamsSchema }, + response: { description: "Connector mapping deleted successfully.", status: 204 }, + tag: "Connectors", + }, + listConnectorSyncEvents: { + audience: "admin", + description: "List connector sync events for inspection and debugging.", + method: "GET", + path: pluginArchRoutePaths.connectorSyncEvents, + request: { query: connectorSyncEventListQuerySchema }, + response: { description: "Connector sync event list.", schema: connectorSyncEventListResponseSchema, status: 200 }, + tag: "Connectors", + }, + getConnectorSyncEvent: { + audience: "admin", + description: "Get one connector sync event.", + method: "GET", + path: pluginArchRoutePaths.connectorSyncEvent, + request: { params: connectorSyncEventParamsSchema }, + response: { description: "Connector sync event detail.", schema: connectorSyncEventDetailResponseSchema, status: 200 }, + tag: 
"Connectors", + }, + retryConnectorSyncEvent: { + audience: "admin", + description: "Queue a retry for a failed or partial connector sync event.", + method: "POST", + path: pluginArchRoutePaths.connectorSyncEventRetry, + request: { params: connectorSyncEventParamsSchema }, + response: { description: "Connector sync retry queued successfully.", schema: connectorSyncAsyncResponseSchema, status: 202 }, + tag: "Connectors", + }, + getGithubConnectorDiscovery: { + audience: "admin", + description: "Analyze a GitHub connector instance and return the discovered repository shape and plugin candidates.", + method: "GET", + path: pluginArchRoutePaths.connectorInstanceDiscovery, + request: { params: connectorInstanceParamsSchema }, + response: { description: "GitHub connector discovery returned successfully.", schema: githubConnectorDiscoveryResponseSchema, status: 200 }, + tag: "GitHub", + }, + listGithubConnectorDiscoveryTree: { + audience: "admin", + description: "Page through the normalized GitHub repository tree for one connector instance.", + method: "GET", + path: pluginArchRoutePaths.connectorInstanceDiscoveryTree, + request: { params: connectorInstanceParamsSchema, query: githubDiscoveryTreeQuerySchema }, + response: { description: "GitHub discovery tree page returned successfully.", schema: githubDiscoveryTreeResponseSchema, status: 200 }, + tag: "GitHub", + }, + applyGithubConnectorDiscovery: { + audience: "admin", + description: "Create OpenWork plugins and connector mappings from selected GitHub discovery candidates.", + method: "POST", + path: pluginArchRoutePaths.connectorInstanceDiscoveryApply, + request: { body: githubDiscoveryApplySchema, params: connectorInstanceParamsSchema }, + response: { description: "GitHub discovery selection applied successfully.", schema: githubDiscoveryApplyResponseSchema, status: 200 }, + tag: "GitHub", + }, + githubInstallStart: { + audience: "admin", + description: "Start the GitHub App install flow and return a redirect URL.", 
+ method: "POST", + path: pluginArchRoutePaths.githubInstallStart, + request: { body: githubInstallStartSchema }, + response: { description: "GitHub install redirect created successfully.", schema: githubInstallStartResponseSchema, status: 200 }, + tag: "GitHub", + }, + githubInstallComplete: { + audience: "admin", + description: "Complete one GitHub App installation and return repositories visible to it.", + method: "POST", + path: pluginArchRoutePaths.githubInstallComplete, + request: { body: githubInstallCompleteSchema }, + response: { description: "GitHub installation completed successfully.", schema: githubInstallCompleteResponseSchema, status: 200 }, + tag: "GitHub", + }, + githubSetup: { + audience: "admin", + description: "Create the GitHub connector account, instance, target, and initial mappings in one setup flow.", + method: "POST", + path: pluginArchRoutePaths.githubSetup, + request: { body: githubConnectorSetupSchema }, + response: { description: "GitHub connector setup created successfully.", schema: githubSetupResponseSchema, status: 201 }, + tag: "GitHub", + }, + githubCreateAccount: { + audience: "admin", + description: "Persist a GitHub App installation as a reusable connector account.", + method: "POST", + path: pluginArchRoutePaths.githubAccounts, + request: { body: githubConnectorAccountCreateSchema }, + response: { description: "GitHub connector account created successfully.", schema: connectorAccountMutationResponseSchema, status: 201 }, + tag: "GitHub", + }, + githubListRepositories: { + audience: "admin", + description: "List repositories visible to one GitHub connector account.", + method: "GET", + path: pluginArchRoutePaths.githubAccountRepositories, + request: { params: connectorAccountRepositoryParamsSchema, query: githubRepositoryListQuerySchema }, + response: { description: "GitHub repositories visible to the installation.", schema: githubRepositoryListResponseSchema, status: 200 }, + tag: "GitHub", + }, + githubValidateTarget: { + 
audience: "admin", + description: "Validate one GitHub repository-branch target before persisting it.", + method: "POST", + path: pluginArchRoutePaths.githubValidateTarget, + request: { body: githubValidateTargetSchema }, + response: { description: "GitHub target validation result.", schema: githubValidateTargetResponseSchema, status: 200 }, + tag: "GitHub", + }, + githubWebhookIngress: { + audience: "public_webhook", + description: "Accept a GitHub App webhook delivery, verify the raw-body signature, and enqueue any relevant sync work.", + method: "POST", + path: pluginArchRoutePaths.githubWebhookIngress, + request: { body: githubWebhookRawBodySchema, headers: githubWebhookHeadersSchema }, + response: { description: "Valid webhook accepted or ignored.", schema: githubWebhookAcceptedResponseSchema.or(githubWebhookIgnoredResponseSchema), status: 202 }, + tag: "Webhooks", + }, +} + +export const deferredPluginArchEndpointContracts: DeferredEndpointContract[] = [ + { + description: "Compare two config object versions.", + method: "GET", + path: pluginArchRoutePaths.configObjectCompareVersions, + reason: "Diff semantics can wait until immutable version storage exists.", + tag: "Config Objects", + }, + { + description: "Type-specific convenience endpoints for skills.", + method: "GET", + path: pluginArchRoutePaths.skills, + reason: "Shared config-object routes land first; per-type wrappers follow once the core surface is working.", + tag: "Config Objects", + }, + { + description: "Type-specific convenience endpoints for agents.", + method: "GET", + path: pluginArchRoutePaths.agents, + reason: "Shared config-object routes land first; per-type wrappers follow once the core surface is working.", + tag: "Config Objects", + }, + { + description: "Type-specific convenience endpoints for commands.", + method: "GET", + path: pluginArchRoutePaths.commands, + reason: "Shared config-object routes land first; per-type wrappers follow once the core surface is working.", + tag: 
"Config Objects", + }, + { + description: "Type-specific convenience endpoints for tools.", + method: "GET", + path: pluginArchRoutePaths.tools, + reason: "Shared config-object routes land first; per-type wrappers follow once the core surface is working.", + tag: "Config Objects", + }, + { + description: "Type-specific convenience endpoints for MCPs.", + method: "GET", + path: pluginArchRoutePaths.mcps, + reason: "Shared config-object routes land first; per-type wrappers follow once the core surface is working.", + tag: "Config Objects", + }, + { + description: "Create and list plugin releases.", + method: "POST", + path: pluginArchRoutePaths.pluginReleases, + reason: "Delivery and release snapshots stay deferred until the admin and webhook slice is live.", + tag: "Plugins", + }, + { + description: "Preview one connector mapping against remote source data.", + method: "POST", + path: pluginArchRoutePaths.connectorMappingPreview, + reason: "Mapping preview depends on the later reconciliation engine and should not block the first admin slice.", + tag: "Connectors", + }, +] + +export const pluginArchContractSummary = { + implementationHome: { + adminApi: "ee/apps/den-api/src/routes/org", + persistence: "ee/packages/den-db/src/schema", + webhookIngress: "ee/apps/den-api/src/routes", + }, + outOfScope: [ + "plugin delivery/install endpoints", + "plugin release snapshot implementation", + "type-specific convenience wrappers", + ], +} as const diff --git a/ee/apps/den-api/src/routes/org/plugin-system/github-app.ts b/ee/apps/den-api/src/routes/org/plugin-system/github-app.ts new file mode 100644 index 0000000000..4fbbe8734c --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/github-app.ts @@ -0,0 +1,642 @@ +import { createHmac, createSign, randomUUID, timingSafeEqual } from "node:crypto" + +export class GithubConnectorConfigError extends Error { + constructor(message: string) { + super(message) + this.name = "GithubConnectorConfigError" + } +} + +export class 
GithubConnectorRequestError extends Error { + constructor( + message: string, + readonly status: number, + readonly body?: unknown, + ) { + super(message) + this.name = "GithubConnectorRequestError" + } +} + +export type GithubConnectorAppConfig = { + appId: string + clientId?: string + clientSecret?: string + privateKey: string +} + +type GithubFetch = typeof fetch + +export type GithubManifestKind = "marketplace" | "plugin" | null + +type GithubRepositorySummary = { + defaultBranch: string | null + fullName: string + hasPluginManifest?: boolean + id: number + manifestKind?: GithubManifestKind + marketplacePluginCount?: number | null + private: boolean +} + +export type GithubRepositoryTreeEntry = { + id: string + kind: "blob" | "tree" + path: string + sha: string | null + size: number | null +} + +export type GithubRepositoryTreeSnapshot = { + headSha: string + truncated: boolean + treeEntries: GithubRepositoryTreeEntry[] + treeSha: string +} + +export type GithubAppSummary = { + htmlUrl: string + name: string + slug: string +} + +export type GithubInstallationSummary = { + accountLogin: string + accountType: "Organization" | "User" + displayName: string + installationId: number + repositorySelection: "all" | "selected" + settingsUrl: string | null +} + +export type GithubInstallStatePayload = { + exp: number + nonce: string + orgId: string + returnPath: string + userId: string +} + +const GITHUB_API_BASE = "https://api.github.com" +const GITHUB_API_VERSION = "2022-11-28" + +function base64UrlEncode(value: unknown) { + const buffer = typeof value === "string" + ? Buffer.from(value) + : Buffer.isBuffer(value) + ? value + : value instanceof Uint8Array + ? 
Buffer.from(value.buffer, value.byteOffset, value.byteLength) + : (() => { + throw new GithubConnectorConfigError("Unsupported value passed to base64UrlEncode.") + })() + + return buffer + .toString("base64") + .replace(/\+/g, "-") + .replace(/\//g, "_") + .replace(/=+$/g, "") +} + +export function normalizeGithubPrivateKey(privateKey: string) { + return privateKey.includes("\\n") ? privateKey.replace(/\\n/g, "\n") : privateKey +} + +export function getGithubConnectorAppConfig(input: { appId?: string; privateKey?: string }) { + const appId = input.appId?.trim() + const privateKey = input.privateKey?.trim() + + if (!appId) { + throw new GithubConnectorConfigError("GITHUB_CONNECTOR_APP_ID is required for live GitHub connector testing.") + } + + if (!privateKey) { + throw new GithubConnectorConfigError("GITHUB_CONNECTOR_APP_PRIVATE_KEY is required for live GitHub connector testing.") + } + + return { + appId, + privateKey: normalizeGithubPrivateKey(privateKey), + } satisfies GithubConnectorAppConfig +} + +function base64UrlDecode(value: string) { + return Buffer.from(value, "base64url") +} + +function isSafeRelativeReturnPath(value: string) { + return value.startsWith("/") && !value.startsWith("//") +} + +export function createGithubInstallStateToken(input: { + now?: Date | number + orgId: string + returnPath: string + secret: string + ttlSeconds?: number + userId: string +}) { + const nowMs = input.now instanceof Date ? input.now.getTime() : (typeof input.now === "number" ? input.now : Date.now()) + const returnPath = input.returnPath.trim() + if (!isSafeRelativeReturnPath(returnPath)) { + throw new GithubConnectorConfigError("GitHub install return path must be a safe relative path.") + } + + const payload: GithubInstallStatePayload = { + exp: Math.floor(nowMs / 1000) + (input.ttlSeconds ?? 
10 * 60), + nonce: randomUUID(), + orgId: input.orgId, + returnPath, + userId: input.userId, + } + const encodedPayload = base64UrlEncode(JSON.stringify(payload)) + const signature = base64UrlEncode(createHmac("sha256", input.secret).update(encodedPayload).digest()) + return `${encodedPayload}.${signature}` +} + +export function verifyGithubInstallStateToken(input: { now?: Date | number; secret: string; token: string }) { + const [encodedPayload, encodedSignature] = input.token.split(".") + if (!encodedPayload || !encodedSignature) { + return null + } + + try { + const expectedSignature = createHmac("sha256", input.secret).update(encodedPayload).digest() + const providedSignature = base64UrlDecode(encodedSignature) + const expectedBytes = new Uint8Array(expectedSignature) + const providedBytes = new Uint8Array(providedSignature) + if (expectedBytes.length !== providedBytes.length || !timingSafeEqual(expectedBytes, providedBytes)) { + return null + } + + const payload = JSON.parse(base64UrlDecode(encodedPayload).toString("utf8")) as Partial + const nowSeconds = Math.floor((input.now instanceof Date ? input.now.getTime() : (typeof input.now === "number" ? input.now : Date.now())) / 1000) + if ( + typeof payload.exp !== "number" + || typeof payload.nonce !== "string" + || typeof payload.orgId !== "string" + || typeof payload.returnPath !== "string" + || typeof payload.userId !== "string" + || payload.exp < nowSeconds + || !isSafeRelativeReturnPath(payload.returnPath) + ) { + return null + } + return payload as GithubInstallStatePayload + } catch { + return null + } +} + +export function createGithubAppJwt(input: GithubConnectorAppConfig & { now?: Date | number }) { + const nowMs = input.now instanceof Date ? input.now.getTime() : (typeof input.now === "number" ? 
input.now : Date.now()) + const issuedAt = Math.floor(nowMs / 1000) - 60 + const expiresAt = issuedAt + (9 * 60) + const signingInput = [ + base64UrlEncode(JSON.stringify({ alg: "RS256", typ: "JWT" })), + base64UrlEncode(JSON.stringify({ exp: expiresAt, iat: issuedAt, iss: input.appId })), + ].join(".") + + const signer = createSign("RSA-SHA256") + signer.update(signingInput) + signer.end() + + return `${signingInput}.${base64UrlEncode(signer.sign(input.privateKey))}` +} + +async function requestGithubJson(input: { + fetchFn?: GithubFetch + headers?: Record + method?: "GET" | "POST" + path: string + allowStatuses?: number[] +}) { + const fetchFn = input.fetchFn ?? fetch + const response = await fetchFn(`${GITHUB_API_BASE}${input.path}`, { + headers: { + Accept: "application/vnd.github+json", + "User-Agent": "openwork-den-api", + "X-GitHub-Api-Version": GITHUB_API_VERSION, + ...input.headers, + }, + method: input.method ?? "GET", + }) + + const text = await response.text() + const body = text ? JSON.parse(text) as unknown : null + if (!response.ok && !(input.allowStatuses ?? []).includes(response.status)) { + const message = body && typeof body === "object" && typeof (body as Record).message === "string" + ? (body as Record).message as string + : `GitHub request failed with status ${response.status}.` + throw new GithubConnectorRequestError(message, response.status, body) + } + + return { + body: body as TResponse, + ok: response.ok, + status: response.status, + } +} + +export async function getGithubAppSummary(input: { config: GithubConnectorAppConfig; fetchFn?: GithubFetch }) { + const jwt = createGithubAppJwt(input.config) + const response = await requestGithubJson<{ html_url?: string; name?: string; slug?: string }>({ + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${jwt}`, + }, + path: "/app", + }) + + const htmlUrl = typeof response.body.html_url === "string" ? 
response.body.html_url.trim() : "" + const slug = typeof response.body.slug === "string" ? response.body.slug.trim() : "" + const name = typeof response.body.name === "string" ? response.body.name.trim() : "" + if (!htmlUrl || !slug || !name) { + throw new GithubConnectorRequestError("GitHub app metadata response was incomplete.", 502, response.body) + } + + return { + htmlUrl, + name, + slug, + } satisfies GithubAppSummary +} + +export function buildGithubAppInstallUrl(input: { app: GithubAppSummary; state: string }) { + const url = new URL(`${input.app.htmlUrl.replace(/\/+$/, "")}/installations/new`) + url.searchParams.set("state", input.state) + return url.toString() +} + +export async function getGithubInstallationSummary(input: { config: GithubConnectorAppConfig; fetchFn?: GithubFetch; installationId: number }) { + const jwt = createGithubAppJwt(input.config) + const response = await requestGithubJson<{ + account?: { + login?: string + type?: string + } + html_url?: string + id?: number + repository_selection?: string + }>({ + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${jwt}`, + }, + path: `/app/installations/${input.installationId}`, + }) + + const installationId = typeof response.body.id === "number" ? response.body.id : input.installationId + const accountLogin = typeof response.body.account?.login === "string" ? response.body.account.login.trim() : "" + const accountType = response.body.account?.type === "Organization" ? "Organization" : "User" + const repositorySelection = response.body.repository_selection === "selected" ? "selected" : "all" + if (!accountLogin) { + throw new GithubConnectorRequestError("GitHub installation response was missing the account login.", 502, response.body) + } + + return { + accountLogin, + accountType, + displayName: accountLogin, + installationId, + repositorySelection, + settingsUrl: typeof response.body.html_url === "string" ? 
response.body.html_url.trim() || null : null, + } satisfies GithubInstallationSummary +} + +async function createGithubInstallationAccessToken(input: { config: GithubConnectorAppConfig; fetchFn?: GithubFetch; installationId: number }) { + const jwt = createGithubAppJwt(input.config) + const response = await requestGithubJson<{ token?: string }>({ + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${jwt}`, + }, + method: "POST", + path: `/app/installations/${input.installationId}/access_tokens`, + }) + + const token = typeof response.body?.token === "string" ? response.body.token : null + if (!token) { + throw new GithubConnectorRequestError("GitHub did not return an installation access token.", 502, response.body) + } + + return token +} + +export async function getGithubInstallationAccessToken(input: { config: GithubConnectorAppConfig; fetchFn?: GithubFetch; installationId: number }) { + return createGithubInstallationAccessToken(input) +} + +function normalizeGithubRepository(entry: unknown): GithubRepositorySummary | null { + if (!entry || typeof entry !== "object") { + return null + } + + const candidate = entry as Record + const id = typeof candidate.id === "number" ? candidate.id : Number(candidate.id) + const fullName = typeof candidate.full_name === "string" + ? candidate.full_name + : typeof candidate.fullName === "string" + ? candidate.fullName + : null + + if (!Number.isFinite(id) || !fullName) { + return null + } + + return { + defaultBranch: typeof candidate.default_branch === "string" + ? candidate.default_branch + : typeof candidate.defaultBranch === "string" + ? 
candidate.defaultBranch + : null, + fullName, + id, + private: Boolean(candidate.private), + } +} + +export async function listGithubInstallationRepositories(input: { config: GithubConnectorAppConfig; fetchFn?: GithubFetch; installationId: number }) { + const token = await createGithubInstallationAccessToken(input) + const response = await requestGithubJson<{ repositories?: unknown[] }>({ + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${token}`, + }, + path: "/installation/repositories", + }) + + if (!Array.isArray(response.body.repositories)) { + return [] + } + + const repositories: GithubRepositorySummary[] = [] + for (const entry of response.body.repositories) { + const normalized = normalizeGithubRepository(entry) + if (!normalized) { + continue + } + + const manifest = await detectRepositoryManifest({ + fetchFn: input.fetchFn, + ownerAndRepo: normalized.fullName, + token, + }) + + repositories.push({ + ...normalized, + hasPluginManifest: manifest.manifestKind !== null, + manifestKind: manifest.manifestKind, + marketplacePluginCount: manifest.marketplacePluginCount, + }) + } + + return repositories +} + +async function detectRepositoryManifest(input: { fetchFn?: GithubFetch; ownerAndRepo: string; token: string }): Promise<{ + manifestKind: GithubManifestKind + marketplacePluginCount: number | null +}> { + const parts = splitRepositoryFullName(input.ownerAndRepo) + if (!parts) { + return { manifestKind: null, marketplacePluginCount: null } + } + + const marketplaceResponse = await requestGithubJson<{ content?: string; encoding?: string }>({ + allowStatuses: [404], + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${input.token}`, + }, + path: `/repos/${encodeURIComponent(parts.owner)}/${encodeURIComponent(parts.repo)}/contents/.claude-plugin/marketplace.json`, + }) + + if (marketplaceResponse.ok && typeof marketplaceResponse.body?.content === "string" && marketplaceResponse.body.encoding === "base64") { + let 
marketplacePluginCount: number | null = null + try { + const decoded = Buffer.from(marketplaceResponse.body.content.replace(/\n/g, ""), "base64").toString("utf8") + const parsed = JSON.parse(decoded) as unknown + if (parsed && typeof parsed === "object" && !Array.isArray(parsed) && Array.isArray((parsed as Record).plugins)) { + marketplacePluginCount = ((parsed as Record).plugins as unknown[]).length + } + } catch { + marketplacePluginCount = null + } + return { manifestKind: "marketplace", marketplacePluginCount } + } + + const pluginResponse = await requestGithubJson({ + allowStatuses: [404], + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${input.token}`, + }, + path: `/repos/${encodeURIComponent(parts.owner)}/${encodeURIComponent(parts.repo)}/contents/.claude-plugin/plugin.json`, + }) + + if (pluginResponse.ok) { + return { manifestKind: "plugin", marketplacePluginCount: null } + } + + return { manifestKind: null, marketplacePluginCount: null } +} + +function splitRepositoryFullName(repositoryFullName: string) { + const [owner, repo, ...rest] = repositoryFullName.trim().split("/") + if (!owner || !repo || rest.length > 0) { + return null + } + + return { owner, repo } +} + +export async function getGithubRepositoryTextFile(input: { + config: GithubConnectorAppConfig + fetchFn?: GithubFetch + installationId: number + path: string + ref: string + repositoryFullName: string + token?: string +}) { + const repositoryParts = splitRepositoryFullName(input.repositoryFullName) + if (!repositoryParts) { + throw new GithubConnectorRequestError("GitHub repository full name is invalid.", 400) + } + + const token = input.token ?? 
await createGithubInstallationAccessToken(input) + const response = await requestGithubJson<{ content?: string; encoding?: string }>({ + allowStatuses: [404], + fetchFn: input.fetchFn, + headers: { + Authorization: `Bearer ${token}`, + }, + path: `/repos/${encodeURIComponent(repositoryParts.owner)}/${encodeURIComponent(repositoryParts.repo)}/contents/${input.path.split("/").map(encodeURIComponent).join("/")}?ref=${encodeURIComponent(input.ref)}`, + }) + + if (!response.ok) { + return null + } + + if (response.body.encoding !== "base64" || typeof response.body.content !== "string") { + throw new GithubConnectorRequestError("GitHub file response was incomplete.", 502, response.body) + } + + return Buffer.from(response.body.content.replace(/\n/g, ""), "base64").toString("utf8") +} + +export async function getGithubRepositoryTree(input: { + branch: string + config: GithubConnectorAppConfig + fetchFn?: GithubFetch + installationId: number + repositoryFullName: string + token?: string +}) { + const repositoryParts = splitRepositoryFullName(input.repositoryFullName) + if (!repositoryParts) { + throw new GithubConnectorRequestError("GitHub repository full name is invalid.", 400) + } + + const token = input.token ?? await createGithubInstallationAccessToken(input) + const authHeaders = { + Authorization: `Bearer ${token}`, + } + const commitResponse = await requestGithubJson<{ + commit?: { + tree?: { + sha?: string + } + } + sha?: string + }>({ + fetchFn: input.fetchFn, + headers: authHeaders, + path: `/repos/${encodeURIComponent(repositoryParts.owner)}/${encodeURIComponent(repositoryParts.repo)}/commits/${encodeURIComponent(input.branch.trim())}`, + }) + + const headSha = typeof commitResponse.body.sha === "string" ? commitResponse.body.sha : "" + const treeSha = typeof commitResponse.body.commit?.tree?.sha === "string" ? 
commitResponse.body.commit.tree.sha : "" + if (!headSha || !treeSha) { + throw new GithubConnectorRequestError("GitHub commit response was missing the head or tree sha.", 502, commitResponse.body) + } + + const treeResponse = await requestGithubJson<{ + truncated?: boolean + tree?: Array<{ + path?: string + sha?: string + size?: number + type?: string + }> + }>({ + fetchFn: input.fetchFn, + headers: authHeaders, + path: `/repos/${encodeURIComponent(repositoryParts.owner)}/${encodeURIComponent(repositoryParts.repo)}/git/trees/${encodeURIComponent(treeSha)}?recursive=1`, + }) + + const treeEntries = Array.isArray(treeResponse.body.tree) + ? treeResponse.body.tree.flatMap((entry) => { + const path = typeof entry.path === "string" ? entry.path.trim() : "" + const kind = entry.type === "blob" || entry.type === "tree" ? entry.type : null + if (!path || !kind) { + return [] + } + + return [{ + id: path, + kind, + path, + sha: typeof entry.sha === "string" ? entry.sha : null, + size: typeof entry.size === "number" ? entry.size : null, + } satisfies GithubRepositoryTreeEntry] + }) + : [] + + return { + headSha, + truncated: Boolean(treeResponse.body.truncated), + treeEntries, + treeSha, + } satisfies GithubRepositoryTreeSnapshot +} + +export async function validateGithubInstallationTarget(input: { + branch: string + config: GithubConnectorAppConfig + fetchFn?: GithubFetch + installationId: number + ref: string + repositoryFullName: string + repositoryId: number + token?: string +}) { + const repositoryParts = splitRepositoryFullName(input.repositoryFullName) + if (!repositoryParts) { + return { + branchExists: false, + defaultBranch: null, + repositoryAccessible: false, + } + } + + const token = input.token ?? 
await createGithubInstallationAccessToken(input) + const authHeaders = { + Authorization: `Bearer ${token}`, + } + const repositoryResponse = await requestGithubJson<{ + default_branch?: string + full_name?: string + id?: number + }>({ + allowStatuses: [404], + fetchFn: input.fetchFn, + headers: authHeaders, + path: `/repos/${encodeURIComponent(repositoryParts.owner)}/${encodeURIComponent(repositoryParts.repo)}`, + }) + + if (!repositoryResponse.ok) { + return { + branchExists: false, + defaultBranch: null, + repositoryAccessible: false, + } + } + + const defaultBranch = typeof repositoryResponse.body.default_branch === "string" + ? repositoryResponse.body.default_branch + : null + const repositoryAccessible = repositoryResponse.body.id === input.repositoryId + && repositoryResponse.body.full_name === input.repositoryFullName + + if (!repositoryAccessible) { + return { + branchExists: false, + defaultBranch, + repositoryAccessible: false, + } + } + + const expectedRef = `refs/heads/${input.branch.trim()}` + if (input.ref.trim() !== expectedRef) { + return { + branchExists: false, + defaultBranch, + repositoryAccessible: true, + } + } + + const branchResponse = await requestGithubJson<{ name?: string }>({ + allowStatuses: [404], + fetchFn: input.fetchFn, + headers: authHeaders, + path: `/repos/${encodeURIComponent(repositoryParts.owner)}/${encodeURIComponent(repositoryParts.repo)}/branches/${encodeURIComponent(input.branch.trim())}`, + }) + + return { + branchExists: branchResponse.ok && branchResponse.body.name === input.branch.trim(), + defaultBranch, + repositoryAccessible: true, + } +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/github-discovery.ts b/ee/apps/den-api/src/routes/org/plugin-system/github-discovery.ts new file mode 100644 index 0000000000..e7ce1f7c24 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/github-discovery.ts @@ -0,0 +1,519 @@ +type GithubDiscoveryTreeEntryKind = "blob" | "tree" + +export type 
GithubDiscoveryTreeEntry = { + id: string + kind: GithubDiscoveryTreeEntryKind + path: string + sha: string | null + size: number | null +} + +export type GithubDiscoveryClassification = + | "claude_marketplace_repo" + | "claude_multi_plugin_repo" + | "claude_single_plugin_repo" + | "folder_inferred_repo" + | "unsupported" + +export type GithubDiscoveredPluginSourceKind = + | "marketplace_entry" + | "plugin_manifest" + | "standalone_claude" + | "folder_inference" + +export type GithubDiscoveredPluginComponentKind = + | "skill" + | "command" + | "agent" + | "hook" + | "mcp_server" + | "lsp_server" + | "monitor" + | "settings" + +export type GithubDiscoveredPlugin = { + componentKinds: GithubDiscoveredPluginComponentKind[] + componentPaths: { + agents: string[] + commands: string[] + hooks: string[] + lspServers: string[] + mcpServers: string[] + monitors: string[] + settings: string[] + skills: string[] + } + description: string | null + displayName: string + key: string + manifestPath: string | null + metadata: Record + rootPath: string + selectedByDefault: boolean + sourceKind: GithubDiscoveredPluginSourceKind + supported: boolean + warnings: string[] +} + +export type GithubMarketplaceInfo = { + description: string | null + name: string | null + owner: string | null + version: string | null +} + +export type GithubRepoDiscoveryResult = { + classification: GithubDiscoveryClassification + discoveredPlugins: GithubDiscoveredPlugin[] + marketplace: GithubMarketplaceInfo | null + warnings: string[] +} + +type MarketplaceEntry = { + agents?: unknown + commands?: unknown + description?: unknown + hooks?: unknown + mcpServers?: unknown + name?: unknown + settings?: unknown + skills?: unknown + source?: unknown +} + +type PluginMetadata = { + description: string | null + metadata: Record + name: string | null +} + +const KNOWN_COMPONENT_SEGMENTS = ["skills", "commands", "agents"] as const + +function normalizePath(value: string) { + return value.trim().replace(/^\.\//, 
"").replace(/^\/+/, "").replace(/\/+$/, "") +} + +function joinPath(rootPath: string, childPath: string) { + const root = normalizePath(rootPath) + const child = normalizePath(childPath) + if (!root) return child + if (!child) return root + return `${root}/${child}` +} + +function basename(path: string) { + const normalized = normalizePath(path) + if (!normalized) return null + const parts = normalized.split("/") + return parts[parts.length - 1] ?? null +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +function asString(value: unknown) { + return typeof value === "string" && value.trim().length > 0 ? value.trim() : null +} + +function pathDirectoryPrefixes(path: string) { + const segments = normalizePath(path).split("/").filter(Boolean) + const prefixes: string[] = [] + for (let index = 1; index <= segments.length; index += 1) { + prefixes.push(segments.slice(0, index).join("/")) + } + return prefixes +} + +function buildPathSet(entries: GithubDiscoveryTreeEntry[]) { + const knownPaths = new Set() + for (const entry of entries) { + const normalizedPath = normalizePath(entry.path) + if (!normalizedPath) continue + knownPaths.add(normalizedPath) + for (const prefix of pathDirectoryPrefixes(normalizedPath)) { + knownPaths.add(prefix) + } + } + return knownPaths +} + +function hasPath(knownPaths: Set, path: string) { + const normalized = normalizePath(path) + return normalized.length > 0 && knownPaths.has(normalized) +} + +function hasDescendant(knownPaths: Set, path: string) { + const normalized = normalizePath(path) + if (!normalized) return false + for (const candidate of knownPaths) { + if (candidate === normalized || candidate.startsWith(`${normalized}/`)) { + return true + } + } + return false +} + +function readJsonMap(fileTextByPath: Record, path: string) { + const text = fileTextByPath[normalizePath(path)] + if (!text) return null + try { + return JSON.parse(text) as unknown + } catch { + 
return null + } +} + +function readPluginMetadata(fileTextByPath: Record, rootPath: string, manifestPath?: string | null): PluginMetadata { + const manifestCandidate = manifestPath ? normalizePath(manifestPath) : normalizePath(joinPath(rootPath, ".claude-plugin/plugin.json")) + const explicitManifest = manifestCandidate ? readJsonMap(fileTextByPath, manifestCandidate) : null + if (isRecord(explicitManifest)) { + return { + description: asString(explicitManifest.description), + metadata: explicitManifest, + name: asString(explicitManifest.name), + } + } + + const fallbackPluginJson = readJsonMap(fileTextByPath, joinPath(rootPath, "plugin.json")) + if (isRecord(fallbackPluginJson)) { + return { + description: asString(fallbackPluginJson.description), + metadata: fallbackPluginJson, + name: asString(fallbackPluginJson.name), + } + } + + return { + description: null, + metadata: {}, + name: null, + } +} + +function collectComponentPaths(knownPaths: Set, rootPath: string) { + const componentPaths = { + agents: [] as string[], + commands: [] as string[], + hooks: [] as string[], + lspServers: [] as string[], + mcpServers: [] as string[], + monitors: [] as string[], + settings: [] as string[], + skills: [] as string[], + } + + const candidates: Array<[keyof typeof componentPaths, string]> = [ + ["skills", joinPath(rootPath, "skills")], + ["skills", joinPath(rootPath, ".claude/skills")], + ["commands", joinPath(rootPath, "commands")], + ["commands", joinPath(rootPath, ".claude/commands")], + ["agents", joinPath(rootPath, "agents")], + ["agents", joinPath(rootPath, ".claude/agents")], + ["hooks", joinPath(rootPath, "hooks/hooks.json")], + ["mcpServers", joinPath(rootPath, ".mcp.json")], + ["lspServers", joinPath(rootPath, ".lsp.json")], + ["monitors", joinPath(rootPath, "monitors/monitors.json")], + ["settings", joinPath(rootPath, "settings.json")], + ] + + for (const [bucket, candidate] of candidates) { + if (!candidate) continue + if (bucket === "hooks" || bucket === 
"mcpServers" || bucket === "lspServers" || bucket === "monitors" || bucket === "settings") { + if (hasPath(knownPaths, candidate)) { + componentPaths[bucket].push(candidate) + } + continue + } + + if (hasDescendant(knownPaths, candidate)) { + componentPaths[bucket].push(candidate) + } + } + + return componentPaths +} + +function readStringArray(value: unknown) { + return Array.isArray(value) + ? value.flatMap((entry) => { + const normalized = asString(entry) + return normalized ? [normalized] : [] + }) + : [] +} + +function declaredComponentPaths(input: { + declared: Partial> + knownPaths: Set + rootPath: string +}) { + const collect = (values: unknown, { file, directory }: { file?: boolean; directory?: boolean }) => { + const paths: string[] = [] + for (const value of readStringArray(values)) { + const candidate = joinPath(input.rootPath, value) + if (!candidate && !input.rootPath) { + continue + } + if ((directory && hasDescendant(input.knownPaths, candidate)) || (file && hasPath(input.knownPaths, candidate))) { + paths.push(candidate) + } + } + return paths + } + + return { + agents: collect(input.declared.agents, { directory: true }), + commands: collect(input.declared.commands, { directory: true }), + hooks: collect(input.declared.hooks, { file: true, directory: true }), + lspServers: [], + mcpServers: collect(input.declared.mcpServers, { file: true }), + monitors: [], + settings: collect(input.declared.settings, { file: true }), + skills: collect(input.declared.skills, { directory: true }), + } satisfies GithubDiscoveredPlugin["componentPaths"] +} + +function marketplaceComponentPaths(entry: MarketplaceEntry, knownPaths: Set, rootPath: string) { + return declaredComponentPaths({ + declared: { + agents: entry.agents, + commands: entry.commands, + hooks: entry.hooks, + mcpServers: entry.mcpServers, + settings: entry.settings, + skills: entry.skills, + }, + knownPaths, + rootPath, + }) +} + +function hasAnyComponentPaths(componentPaths: 
GithubDiscoveredPlugin["componentPaths"]) { + return Object.values(componentPaths).some((paths) => paths.length > 0) +} + +function componentKindsFromPaths(componentPaths: GithubDiscoveredPlugin["componentPaths"]): GithubDiscoveredPluginComponentKind[] { + const kinds: GithubDiscoveredPluginComponentKind[] = [] + if (componentPaths.skills.length > 0) kinds.push("skill") + if (componentPaths.commands.length > 0) kinds.push("command") + if (componentPaths.agents.length > 0) kinds.push("agent") + if (componentPaths.hooks.length > 0) kinds.push("hook") + if (componentPaths.mcpServers.length > 0) kinds.push("mcp_server") + if (componentPaths.lspServers.length > 0) kinds.push("lsp_server") + if (componentPaths.monitors.length > 0) kinds.push("monitor") + if (componentPaths.settings.length > 0) kinds.push("settings") + return kinds +} + +function buildDiscoveredPlugin(input: { + componentPathsOverride?: GithubDiscoveredPlugin["componentPaths"] | null + description?: string | null + displayName?: string | null + fileTextByPath: Record + key: string + knownPaths: Set + manifestPath?: string | null + rootPath: string + sourceKind: GithubDiscoveredPluginSourceKind + supported?: boolean + warnings?: string[] +}) { + const metadata = readPluginMetadata(input.fileTextByPath, input.rootPath, input.manifestPath) + const manifestDeclaredPaths = declaredComponentPaths({ + declared: metadata.metadata, + knownPaths: input.knownPaths, + rootPath: input.rootPath, + }) + const componentPaths = input.componentPathsOverride + ?? (hasAnyComponentPaths(manifestDeclaredPaths) ? manifestDeclaredPaths : collectComponentPaths(input.knownPaths, input.rootPath)) + const displayName = input.displayName?.trim() + || metadata.name + || basename(input.rootPath) + || "Repository plugin" + + return { + componentKinds: componentKindsFromPaths(componentPaths), + componentPaths, + description: input.description ?? metadata.description, + displayName, + key: input.key, + manifestPath: input.manifestPath ? 
normalizePath(input.manifestPath) : (hasPath(input.knownPaths, joinPath(input.rootPath, ".claude-plugin/plugin.json")) ? joinPath(input.rootPath, ".claude-plugin/plugin.json") : null), + metadata: metadata.metadata, + rootPath: normalizePath(input.rootPath), + selectedByDefault: input.supported !== false, + sourceKind: input.sourceKind, + supported: input.supported !== false, + warnings: input.warnings ?? [], + } satisfies GithubDiscoveredPlugin +} + +function localMarketplaceRoot(entry: MarketplaceEntry) { + if (typeof entry.source === "string") { + return normalizePath(entry.source) + } + + if (!isRecord(entry.source)) { + return null + } + + if (typeof entry.source.url === "string") { + return null + } + + const localPath = asString(entry.source.path) + return localPath ? normalizePath(localPath) : null +} + +function pluginRootsFromManifests(entries: GithubDiscoveryTreeEntry[]) { + return entries + .map((entry) => normalizePath(entry.path)) + .filter((path) => path.endsWith(".claude-plugin/plugin.json")) + .map((path) => path.slice(0, -"/.claude-plugin/plugin.json".length)) +} + +function inferredRootsFromKnownFolders(entries: GithubDiscoveryTreeEntry[]) { + const inferred = new Set() + for (const entry of entries) { + const normalized = normalizePath(entry.path) + if (!normalized) continue + const segments = normalized.split("/") + for (let index = 0; index < segments.length; index += 1) { + const segment = segments[index] + if (!KNOWN_COMPONENT_SEGMENTS.includes(segment as (typeof KNOWN_COMPONENT_SEGMENTS)[number])) { + continue + } + const rootSegments = segments.slice(0, index) + if (rootSegments.length === 1 && rootSegments[0] === ".claude") { + inferred.add("") + continue + } + inferred.add(rootSegments.join("/")) + break + } + } + return [...inferred] +} + +export function buildGithubRepoDiscovery(input: { + entries: GithubDiscoveryTreeEntry[] + fileTextByPath: Record +}) { + const knownPaths = buildPathSet(input.entries) + const warnings: string[] = [] 
+ + if (hasPath(knownPaths, ".claude-plugin/marketplace.json")) { + const marketplaceJson = readJsonMap(input.fileTextByPath, ".claude-plugin/marketplace.json") + const marketplaceEntries = isRecord(marketplaceJson) && Array.isArray(marketplaceJson.plugins) + ? marketplaceJson.plugins.filter(isRecord) as MarketplaceEntry[] + : [] + + const marketplaceInfo: GithubMarketplaceInfo = isRecord(marketplaceJson) + ? { + description: asString(marketplaceJson.description), + name: asString(marketplaceJson.name), + owner: isRecord(marketplaceJson.owner) + ? asString(marketplaceJson.owner.name) ?? asString(marketplaceJson.owner.login) ?? asString(marketplaceJson.owner) + : asString(marketplaceJson.owner), + version: asString(marketplaceJson.version), + } + : { description: null, name: null, owner: null, version: null } + + const discoveredPlugins = marketplaceEntries.map((entry, index) => { + const rootPath = localMarketplaceRoot(entry) + if (rootPath === null) { + const warning = "Marketplace entry points at an external source and cannot be auto-mapped from this connected repo yet." + warnings.push(warning) + return buildDiscoveredPlugin({ + description: asString(entry.description), + displayName: asString(entry.name) ?? `Marketplace plugin ${index + 1}`, + fileTextByPath: input.fileTextByPath, + key: `marketplace:${asString(entry.name) ?? index}`, + knownPaths, + manifestPath: null, + rootPath: "", + sourceKind: "marketplace_entry", + supported: false, + warnings: [warning], + }) + } + + return buildDiscoveredPlugin({ + componentPathsOverride: (() => { + const override = marketplaceComponentPaths(entry, knownPaths, rootPath) + return hasAnyComponentPaths(override) ? 
override : null + })(), + description: asString(entry.description), + displayName: asString(entry.name), + fileTextByPath: input.fileTextByPath, + key: `marketplace:${rootPath}`, + knownPaths, + manifestPath: joinPath(rootPath, ".claude-plugin/plugin.json"), + rootPath, + sourceKind: "marketplace_entry", + }) + }) + + return { + classification: "claude_marketplace_repo", + discoveredPlugins, + marketplace: marketplaceInfo, + warnings, + } satisfies GithubRepoDiscoveryResult + } + + const manifestRoots = [...new Set(pluginRootsFromManifests(input.entries))] + if (manifestRoots.length > 0) { + const discoveredPlugins = manifestRoots.map((rootPath) => buildDiscoveredPlugin({ + fileTextByPath: input.fileTextByPath, + key: `manifest:${rootPath || "root"}`, + knownPaths, + manifestPath: joinPath(rootPath, ".claude-plugin/plugin.json"), + rootPath, + sourceKind: "plugin_manifest", + })) + + return { + classification: manifestRoots.length === 1 && manifestRoots[0] === "" ? "claude_single_plugin_repo" : "claude_multi_plugin_repo", + discoveredPlugins, + marketplace: null, + warnings, + } satisfies GithubRepoDiscoveryResult + } + + // Intentionally disabled for now: directory-based inference can over-classify + // arbitrary repos as plugins. Until we support a broader compatibility model, + // discovery should only accept explicit Claude plugin markers. + // const inferredRoots = inferredRootsFromKnownFolders(input.entries) + // const standaloneRoot = inferredRoots.includes("") && ( + // hasDescendant(knownPaths, ".claude/skills") + // || hasDescendant(knownPaths, ".claude/commands") + // || hasDescendant(knownPaths, ".claude/agents") + // ) + // const folderRoots = standaloneRoot ? inferredRoots : inferredRoots.filter((root) => root !== "") + // + // if (folderRoots.length > 0) { + // const discoveredPlugins = folderRoots.map((rootPath) => buildDiscoveredPlugin({ + // fileTextByPath: input.fileTextByPath, + // key: `${standaloneRoot && rootPath === "" ? 
"standalone" : "folder"}:${rootPath || "root"}`, + // knownPaths, + // rootPath, + // sourceKind: standaloneRoot && rootPath === "" ? "standalone_claude" : "folder_inference", + // })) + // + // return { + // classification: "folder_inferred_repo", + // discoveredPlugins, + // warnings, + // } satisfies GithubRepoDiscoveryResult + // } + + warnings.push("OpenWork currently only supports Claude-compatible plugins and marketplaces. Add `.claude-plugin/marketplace.json` or `.claude-plugin/plugin.json` to this repository.") + + return { + classification: "unsupported", + discoveredPlugins: [], + marketplace: null, + warnings, + } satisfies GithubRepoDiscoveryResult +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/index.ts b/ee/apps/den-api/src/routes/org/plugin-system/index.ts new file mode 100644 index 0000000000..326d85278b --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/index.ts @@ -0,0 +1,7 @@ +export * from "./contracts.js" +export * from "./access.js" +export * from "./github-app.js" +export * from "./github-discovery.js" +export * from "./routes.js" +export * from "./schemas.js" +export * from "./store.js" diff --git a/ee/apps/den-api/src/routes/org/plugin-system/routes.ts b/ee/apps/den-api/src/routes/org/plugin-system/routes.ts new file mode 100644 index 0000000000..7660ffb32a --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/routes.ts @@ -0,0 +1,1963 @@ +import type { Context, Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { queryValidator, jsonValidator, paramValidator, requireUserMiddleware, resolveMemberTeamsMiddleware, resolveOrganizationContextMiddleware } from "../../../middleware/index.js" +import { emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../../openapi.js" +import type { OrgRouteVariables } from "../shared.js" +import { + accessGrantListResponseSchema, + accessGrantMutationResponseSchema, + 
configObjectAccessGrantParamsSchema, + configObjectCreateSchema, + configObjectCreateVersionSchema, + configObjectDetailResponseSchema, + configObjectListQuerySchema, + configObjectListResponseSchema, + configObjectMutationResponseSchema, + configObjectParamsSchema, + configObjectPluginAttachSchema, + configObjectVersionDetailResponseSchema, + configObjectVersionListQuerySchema, + configObjectVersionListResponseSchema, + configObjectVersionParamsSchema, + connectorAccountCreateSchema, + connectorAccountDetailResponseSchema, + connectorAccountDisconnectSchema, + connectorAccountListQuerySchema, + connectorAccountListResponseSchema, + connectorAccountDisconnectResponseSchema, + connectorAccountMutationResponseSchema, + connectorInstanceAutoImportSchema, + connectorInstanceConfigurationResponseSchema, + connectorInstanceRemoveResponseSchema, + connectorAccountParamsSchema, + connectorAccountRepositoryParamsSchema, + connectorInstanceAccessGrantParamsSchema, + connectorInstanceCreateSchema, + githubConnectorDiscoveryResponseSchema, + githubDiscoveryApplyResponseSchema, + githubDiscoveryApplySchema, + githubDiscoveryTreeQuerySchema, + githubDiscoveryTreeResponseSchema, + connectorInstanceDetailResponseSchema, + connectorInstanceListQuerySchema, + connectorInstanceListResponseSchema, + connectorInstanceMutationResponseSchema, + connectorInstanceParamsSchema, + connectorInstanceUpdateSchema, + connectorMappingCreateSchema, + connectorMappingListQuerySchema, + connectorMappingListResponseSchema, + connectorMappingMutationResponseSchema, + connectorMappingParamsSchema, + connectorMappingUpdateSchema, + connectorSyncAsyncResponseSchema, + connectorSyncEventDetailResponseSchema, + connectorSyncEventListQuerySchema, + connectorSyncEventListResponseSchema, + connectorSyncEventParamsSchema, + connectorTargetCreateSchema, + connectorTargetDetailResponseSchema, + connectorTargetListQuerySchema, + connectorTargetListResponseSchema, + connectorTargetMutationResponseSchema, + 
connectorTargetParamsSchema, + connectorTargetUpdateSchema, + githubConnectorAccountCreateSchema, + githubInstallCompleteResponseSchema, + githubInstallCompleteSchema, + githubInstallStartResponseSchema, + githubInstallStartSchema, + githubRepositoryListQuerySchema, + githubRepositoryListResponseSchema, + githubSetupResponseSchema, + githubConnectorSetupSchema, + githubValidateTargetResponseSchema, + githubValidateTargetSchema, + marketplaceAccessGrantParamsSchema, + marketplaceCreateSchema, + marketplaceDetailResponseSchema, + marketplaceListQuerySchema, + marketplaceListResponseSchema, + marketplaceMutationResponseSchema, + marketplaceParamsSchema, + marketplacePluginListResponseSchema, + marketplaceResolvedResponseSchema, + marketplacePluginMutationResponseSchema, + marketplacePluginParamsSchema, + marketplacePluginWriteSchema, + marketplaceUpdateSchema, + pluginAccessGrantParamsSchema, + pluginCreateSchema, + pluginDetailResponseSchema, + pluginListQuerySchema, + pluginListResponseSchema, + pluginMembershipListResponseSchema, + pluginMembershipMutationResponseSchema, + pluginMembershipWriteSchema, + pluginMutationResponseSchema, + pluginParamsSchema, + pluginUpdateSchema, + resourceAccessGrantWriteSchema, +} from "./schemas.js" +import { requirePluginArchCapability, type PluginArchActorContext, PluginArchAuthorizationError } from "./access.js" +import { pluginArchRoutePaths } from "./contracts.js" +import { + PluginArchRouteFailure, + addPluginMembership, + attachConfigObjectToPlugin, + createConfigObject, + createConfigObjectVersion, + createConnectorAccount, + createConnectorInstance, + createConnectorMapping, + createGithubConnectorAccount, + createMarketplace, + createPlugin, + createResourceAccessGrant, + createConnectorTarget, + deleteConnectorMapping, + deleteResourceAccessGrant, + disconnectConnectorAccount, + getConfigObjectDetail, + getConfigObjectVersion, + getConnectorAccountDetail, + getConnectorInstanceDetail, + getConnectorSyncEventDetail, + 
getConnectorTargetDetail,
  getLatestConfigObjectVersion,
  getMarketplaceDetail,
  getMarketplaceResolved,
  getPluginDetail,
  githubSetup,
  listConfigObjectPlugins,
  listConfigObjectVersions,
  listConfigObjects,
  listConnectorAccounts,
  listConnectorInstances,
  listConnectorMappings,
  listConnectorSyncEvents,
  listConnectorTargets,
  listGithubRepositories,
  listMarketplaceMemberships,
  listMarketplaces,
  listPluginMemberships,
  listPlugins,
  listResourceAccess,
  attachPluginToMarketplace,
  completeGithubConnectorInstall,
  applyGithubConnectorDiscovery,
  getConnectorInstanceConfiguration,
  getGithubConnectorDiscovery,
  getGithubConnectorDiscoveryTree,
  removeConnectorInstance,
  setConnectorInstanceAutoImport,
  queueConnectorTargetResync,
  removeConfigObjectFromPlugin,
  removePluginFromMarketplace,
  removePluginMembership,
  retryConnectorSyncEvent,
  setConfigObjectLifecycle,
  setConnectorInstanceLifecycle,
  setMarketplaceLifecycle,
  setPluginLifecycle,
  startGithubConnectorInstall,
  updateConnectorInstance,
  updateConnectorMapping,
  updateConnectorTarget,
  updateMarketplace,
  updatePlugin,
  validateGithubTarget,
} from "./store.js"

/** Hono context bound to the org-scoped route variable set. */
type OrgContext = Context<{ Variables: OrgRouteVariables }>

/**
 * Reads a previously validated request part from the Hono context.
 * The structural cast works around the validator's typing on `c.req`.
 * NOTE(review): T is unchecked here — callers must pass the type matching the
 * validator attached to the route.
 */
function validRequestPart<T>(c: OrgContext, slot: "json" | "param" | "query") {
  return (c.req as unknown as { valid: (part: typeof slot) => unknown }).valid(slot) as T
}

/** Validated JSON body for the current route. */
function validJson<T>(c: OrgContext) {
  return validRequestPart<T>(c, "json")
}

/** Validated path parameters for the current route. */
function validParam<T>(c: OrgContext) {
  return validRequestPart<T>(c, "param")
}

/** Validated query parameters for the current route. */
function validQuery<T>(c: OrgContext) {
  return validRequestPart<T>(c, "query")
}

function actorContext(c: OrgContext): PluginArchActorContext {
  const organizationContext = c.get("organizationContext")
  if (!organizationContext) {
    throw new PluginArchRouteFailure(404, "organization_not_found", "Organization context not found.")
  }

  return {
    memberTeams: c.get("memberTeams") ?? 
[], + organizationContext, + } +} + +function routeErrorResponse(c: OrgContext, error: unknown) { + if (error instanceof PluginArchAuthorizationError) { + const authorizationError = error as PluginArchAuthorizationError + return c.json({ error: authorizationError.error, message: authorizationError.message }, 403) + } + if (error instanceof PluginArchRouteFailure) { + const failure = error as PluginArchRouteFailure + return c.json({ error: failure.error, message: failure.message }, failure.status) + } + throw error +} + +function withPluginArchOrgContext(app: Hono, method: "delete" | "get" | "patch" | "post", path: string, ...handlers: unknown[]) { + const routeHandler = handlers.pop() as unknown + const routeMiddlewares = handlers as unknown[] + const routeApp = app as unknown as Record unknown> + routeApp[method](path, requireUserMiddleware, ...routeMiddlewares, resolveOrganizationContextMiddleware, resolveMemberTeamsMiddleware, routeHandler) +} + +export function registerPluginArchRoutes(app: Hono) { + withPluginArchOrgContext( + app, + "post", + pluginArchRoutePaths.githubInstallStart, + jsonValidator(githubInstallStartSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Start GitHub install", + description: "Builds a GitHub App install redirect URL for the current organization.", + responses: { + 200: jsonResponse("GitHub install redirect returned successfully.", githubInstallStartResponseSchema), + 400: jsonResponse("The GitHub install request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to connect GitHub.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to connect GitHub.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_account.create") + const body = validJson(c) + return c.json({ ok: true, item: await startGithubConnectorInstall({ context, returnPath: body.returnPath }) }) + } 
catch (error) { + return routeErrorResponse(c, error) + } + }, + ) + + withPluginArchOrgContext( + app, + "post", + pluginArchRoutePaths.githubInstallComplete, + jsonValidator(githubInstallCompleteSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Complete GitHub install", + description: "Completes a GitHub App installation for the current organization and returns visible repositories.", + responses: { + 200: jsonResponse("GitHub installation completed successfully.", githubInstallCompleteResponseSchema), + 400: jsonResponse("The GitHub install completion request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to complete GitHub connection.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to complete GitHub connection.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_account.create") + const body = validJson(c) + return c.json({ ok: true, item: await completeGithubConnectorInstall({ context, installationId: body.installationId, state: body.state }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }, + ) + + withPluginArchOrgContext( + app, + "get", + pluginArchRoutePaths.configObjects, + queryValidator(configObjectListQuerySchema), + describeRoute({ + tags: ["Config Objects"], + summary: "List config objects", + description: "Lists current config object projections visible to the current organization member.", + responses: { + 200: jsonResponse("Config objects returned successfully.", configObjectListResponseSchema), + 400: jsonResponse("The config object query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list config objects.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listConfigObjects({ + connectorInstanceId: query.connectorInstanceId, + 
context: actorContext(c), + cursor: query.cursor, + includeDeleted: query.includeDeleted, + limit: query.limit, + pluginId: query.pluginId, + q: query.q, + sourceMode: query.sourceMode, + status: query.status, + type: query.type, + })) + }, + ) + + withPluginArchOrgContext( + app, + "post", + pluginArchRoutePaths.configObjects, + jsonValidator(configObjectCreateSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Create config object", + description: "Creates a new private config object and initial immutable version.", + responses: { + 201: jsonResponse("Config object created successfully.", configObjectMutationResponseSchema), + 400: jsonResponse("The config object creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create config objects.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to create config objects.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "config_object.create") + const body = validJson(c) + const item = await createConfigObject({ + context, + objectType: body.type, + pluginIds: body.pluginIds, + sourceMode: body.sourceMode, + value: body.input, + }) + return c.json({ ok: true, item }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }, + ) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObject, + paramValidator(configObjectParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Get config object", + description: "Returns one config object detail when the caller can view it.", + responses: { + 200: jsonResponse("Config object returned successfully.", configObjectDetailResponseSchema), + 400: jsonResponse("The config object path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view config objects.", unauthorizedSchema), + 404: jsonResponse("The 
config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ item: await getConfigObjectDetail(actorContext(c), params.configObjectId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.configObjectVersions, + paramValidator(configObjectParamsSchema), + jsonValidator(configObjectCreateVersionSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Create config object version", + description: "Creates a new immutable config object version.", + responses: { + 201: jsonResponse("Config object version created successfully.", configObjectMutationResponseSchema), + 400: jsonResponse("The config object version request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create config object versions.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this config object.", forbiddenSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) as any + return c.json({ ok: true, item: await createConfigObjectVersion({ configObjectId: params.configObjectId, context: actorContext(c), reason: body.reason, value: body.input }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObjectVersions, + paramValidator(configObjectParamsSchema), + queryValidator(configObjectVersionListQuerySchema), + describeRoute({ + tags: ["Config Objects"], + summary: "List config object versions", + description: "Returns immutable versions for one config object.", + responses: { + 200: jsonResponse("Config object versions returned successfully.", configObjectVersionListResponseSchema), + 400: jsonResponse("The version list request was 
invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view config object versions.", unauthorizedSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const query = validQuery(c) + return c.json(await listConfigObjectVersions({ configObjectId: params.configObjectId, context: actorContext(c), cursor: query.cursor, includeDeleted: query.includeDeleted, limit: query.limit })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObjectVersion, + paramValidator(configObjectVersionParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Get config object version", + description: "Returns one immutable config object version.", + responses: { + 200: jsonResponse("Config object version returned successfully.", configObjectVersionDetailResponseSchema), + 400: jsonResponse("The version path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view config object versions.", unauthorizedSchema), + 404: jsonResponse("The config object version could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ item: await getConfigObjectVersion({ configObjectId: params.configObjectId, context: actorContext(c), versionId: params.versionId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObjectLatestVersion, + paramValidator(configObjectParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Get latest config object version", + description: "Returns the latest config object version by created_at and id ordering.", + responses: { + 200: jsonResponse("Latest config object version returned successfully.", 
configObjectVersionDetailResponseSchema), + 400: jsonResponse("The latest-version path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view config object versions.", unauthorizedSchema), + 404: jsonResponse("The config object version could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ item: await getLatestConfigObjectVersion({ configObjectId: params.configObjectId, context: actorContext(c) }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + for (const [path, action] of [[pluginArchRoutePaths.configObjectArchive, "archive"], [pluginArchRoutePaths.configObjectDelete, "delete"], [pluginArchRoutePaths.configObjectRestore, "restore"]] as const) { + withPluginArchOrgContext(app, "post", path, + paramValidator(configObjectParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: `${action} config object`, + description: `${action} a config object without removing its history.`, + responses: { + 200: jsonResponse("Config object lifecycle updated successfully.", configObjectMutationResponseSchema), + 400: jsonResponse("The lifecycle path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage config objects.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage this config object.", forbiddenSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await setConfigObjectLifecycle({ action, configObjectId: params.configObjectId, context: actorContext(c) }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + } + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObjectPlugins, + paramValidator(configObjectParamsSchema), + describeRoute({ + tags: 
["Config Objects"], + summary: "List config object plugins", + description: "Lists plugins that currently include the config object.", + responses: { + 200: jsonResponse("Config object plugins returned successfully.", pluginMembershipListResponseSchema), + 400: jsonResponse("The config object plugin path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view config object plugins.", unauthorizedSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listConfigObjectPlugins({ configObjectId: params.configObjectId, context: actorContext(c) })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.configObjectPlugins, + paramValidator(configObjectParamsSchema), + jsonValidator(configObjectPluginAttachSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Attach config object to plugin", + description: "Adds a config object to a plugin when the caller can edit the target plugin.", + responses: { + 201: jsonResponse("Plugin membership created successfully.", pluginMembershipMutationResponseSchema), + 400: jsonResponse("The plugin membership request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin membership.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit the target plugin.", forbiddenSchema), + 404: jsonResponse("The config object or plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await attachConfigObjectToPlugin({ configObjectId: params.configObjectId, context: actorContext(c), membershipSource: body.membershipSource, pluginId: body.pluginId }) }, 201) + } catch (error) { + 
return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.configObjectPlugin, + paramValidator(configObjectParamsSchema.extend(pluginParamsSchema.pick({ pluginId: true }).shape)), + describeRoute({ + tags: ["Config Objects"], + summary: "Remove config object from plugin", + description: "Removes one active plugin membership from a config object.", + responses: { + 204: emptyResponse("Plugin membership removed successfully."), + 400: jsonResponse("The plugin membership path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin membership.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit the target plugin.", forbiddenSchema), + 404: jsonResponse("The plugin membership could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await removeConfigObjectFromPlugin({ configObjectId: params.configObjectId, context: actorContext(c), pluginId: params.pluginId }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.configObjectAccess, + paramValidator(configObjectParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "List config object access grants", + description: "Lists direct, team, and org-wide grants for one config object.", + responses: { + 200: jsonResponse("Config object access grants returned successfully.", accessGrantListResponseSchema), + 400: jsonResponse("The access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage config object access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage config object access.", forbiddenSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + 
try { + const params = validParam(c) + return c.json(await listResourceAccess({ context: actorContext(c), resourceId: params.configObjectId, resourceKind: "config_object" })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.configObjectAccess, + paramValidator(configObjectParamsSchema), + jsonValidator(resourceAccessGrantWriteSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Grant config object access", + description: "Creates or reactivates one access grant for a config object.", + responses: { + 201: jsonResponse("Config object access grant created successfully.", accessGrantMutationResponseSchema), + 400: jsonResponse("The access grant request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage config object access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage config object access.", forbiddenSchema), + 404: jsonResponse("The config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await createResourceAccessGrant({ context: actorContext(c), resourceId: params.configObjectId, resourceKind: "config_object", value: body }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.configObjectAccessGrant, + paramValidator(configObjectAccessGrantParamsSchema), + describeRoute({ + tags: ["Config Objects"], + summary: "Revoke config object access", + description: "Soft-revokes one config object access grant.", + responses: { + 204: emptyResponse("Config object access revoked successfully."), + 400: jsonResponse("The access grant path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage config object access.", 
unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage config object access.", forbiddenSchema), + 404: jsonResponse("The access grant could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await deleteResourceAccessGrant({ context: actorContext(c), grantId: params.grantId, resourceId: params.configObjectId, resourceKind: "config_object" }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.plugins, + queryValidator(pluginListQuerySchema), + describeRoute({ + tags: ["Plugins"], + summary: "List plugins", + description: "Lists plugins visible to the current organization member.", + responses: { + 200: jsonResponse("Plugins returned successfully.", pluginListResponseSchema), + 400: jsonResponse("The plugin query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list plugins.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listPlugins({ context: actorContext(c), cursor: query.cursor, limit: query.limit, q: query.q, status: query.status })) + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.plugins, + jsonValidator(pluginCreateSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Create plugin", + description: "Creates a new private plugin and grants the creator manager access.", + responses: { + 201: jsonResponse("Plugin created successfully.", pluginMutationResponseSchema), + 400: jsonResponse("The plugin creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create plugins.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to create plugins.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await 
requirePluginArchCapability(context, "plugin.create") + const body = validJson(c) + return c.json({ ok: true, item: await createPlugin({ context, description: body.description, name: body.name }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.plugin, + paramValidator(pluginParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Get plugin", + description: "Returns one plugin detail when the caller can view it.", + responses: { + 200: jsonResponse("Plugin returned successfully.", pluginDetailResponseSchema), + 400: jsonResponse("The plugin path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view plugins.", unauthorizedSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ item: await getPluginDetail(actorContext(c), params.pluginId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "patch", pluginArchRoutePaths.plugin, + paramValidator(pluginParamsSchema), + jsonValidator(pluginUpdateSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Update plugin", + description: "Updates plugin metadata.", + responses: { + 200: jsonResponse("Plugin updated successfully.", pluginMutationResponseSchema), + 400: jsonResponse("The plugin update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update plugins.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this plugin.", forbiddenSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await updatePlugin({ context: actorContext(c), description: 
body.description, name: body.name, pluginId: params.pluginId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + for (const [path, action] of [[pluginArchRoutePaths.pluginArchive, "archive"], [pluginArchRoutePaths.pluginRestore, "restore"]] as const) { + withPluginArchOrgContext(app, "post", path, + paramValidator(pluginParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: `${action} plugin`, + description: `${action} a plugin without touching its historical memberships.`, + responses: { + 200: jsonResponse("Plugin lifecycle updated successfully.", pluginMutationResponseSchema), + 400: jsonResponse("The plugin lifecycle path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugins.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage this plugin.", forbiddenSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await setPluginLifecycle({ action, context: actorContext(c), pluginId: params.pluginId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + } + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.pluginConfigObjects, + paramValidator(pluginParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: "List plugin config objects", + description: "Lists plugin memberships and resolved config object projections.", + responses: { + 200: jsonResponse("Plugin memberships returned successfully.", pluginMembershipListResponseSchema), + 400: jsonResponse("The plugin membership path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view plugin memberships.", unauthorizedSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = 
validParam(c) + return c.json(await listPluginMemberships({ context: actorContext(c), includeConfigObjects: true, onlyActive: false, pluginId: params.pluginId })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.pluginConfigObjects, + paramValidator(pluginParamsSchema), + jsonValidator(pluginMembershipWriteSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Add plugin config object", + description: "Adds a config object to a plugin.", + responses: { + 201: jsonResponse("Plugin membership created successfully.", pluginMembershipMutationResponseSchema), + 400: jsonResponse("The plugin membership request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin memberships.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this plugin.", forbiddenSchema), + 404: jsonResponse("The plugin or config object could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await addPluginMembership({ configObjectId: body.configObjectId, context: actorContext(c), membershipSource: body.membershipSource, pluginId: params.pluginId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.pluginConfigObject, + paramValidator(pluginParamsSchema.extend(configObjectParamsSchema.pick({ configObjectId: true }).shape)), + describeRoute({ + tags: ["Plugins"], + summary: "Remove plugin config object", + description: "Removes one config object from a plugin.", + responses: { + 204: emptyResponse("Plugin membership removed successfully."), + 400: jsonResponse("The plugin membership path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin memberships.", 
unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this plugin.", forbiddenSchema), + 404: jsonResponse("The plugin membership could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await removePluginMembership({ configObjectId: params.configObjectId, context: actorContext(c), pluginId: params.pluginId }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.pluginResolved, + paramValidator(pluginParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Get resolved plugin", + description: "Lists active plugin memberships with the current config object projection for each item.", + responses: { + 200: jsonResponse("Resolved plugin returned successfully.", pluginMembershipListResponseSchema), + 400: jsonResponse("The plugin path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view resolved plugins.", unauthorizedSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listPluginMemberships({ context: actorContext(c), includeConfigObjects: true, onlyActive: true, pluginId: params.pluginId })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.pluginAccess, + paramValidator(pluginParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: "List plugin access grants", + description: "Lists direct, team, and org-wide grants for a plugin.", + responses: { + 200: jsonResponse("Plugin access grants returned successfully.", accessGrantListResponseSchema), + 400: jsonResponse("The plugin access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin 
access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage plugin access.", forbiddenSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listResourceAccess({ context: actorContext(c), resourceId: params.pluginId, resourceKind: "plugin" })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.pluginAccess, + paramValidator(pluginParamsSchema), + jsonValidator(resourceAccessGrantWriteSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Grant plugin access", + description: "Creates or reactivates one access grant for a plugin.", + responses: { + 201: jsonResponse("Plugin access grant created successfully.", accessGrantMutationResponseSchema), + 400: jsonResponse("The plugin access request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage plugin access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage plugin access.", forbiddenSchema), + 404: jsonResponse("The plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await createResourceAccessGrant({ context: actorContext(c), resourceId: params.pluginId, resourceKind: "plugin", value: validJson(c) }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.pluginAccessGrant, + paramValidator(pluginAccessGrantParamsSchema), + describeRoute({ + tags: ["Plugins"], + summary: "Revoke plugin access", + description: "Soft-revokes one plugin access grant.", + responses: { + 204: emptyResponse("Plugin access revoked successfully."), + 400: jsonResponse("The plugin access path parameters were invalid.", invalidRequestSchema), + 
401: jsonResponse("The caller must be signed in to manage plugin access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage plugin access.", forbiddenSchema), + 404: jsonResponse("The access grant could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await deleteResourceAccessGrant({ context: actorContext(c), grantId: params.grantId, resourceId: params.pluginId, resourceKind: "plugin" }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.marketplaces, + queryValidator(marketplaceListQuerySchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "List marketplaces", + description: "Lists marketplaces visible to the current organization member.", + responses: { + 200: jsonResponse("Marketplaces returned successfully.", marketplaceListResponseSchema), + 400: jsonResponse("The marketplace query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list marketplaces.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listMarketplaces({ context: actorContext(c), cursor: query.cursor, limit: query.limit, q: query.q, status: query.status })) + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.marketplaces, + jsonValidator(marketplaceCreateSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Create marketplace", + description: "Creates a new private marketplace and grants the creator manager access.", + responses: { + 201: jsonResponse("Marketplace created successfully.", marketplaceMutationResponseSchema), + 400: jsonResponse("The marketplace creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create marketplaces.", unauthorizedSchema), + 403: jsonResponse("The caller lacks 
permission to create marketplaces.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "marketplace.create") + const body = validJson(c) + return c.json({ ok: true, item: await createMarketplace({ context, description: body.description, name: body.name }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.marketplace, + paramValidator(marketplaceParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Get marketplace", + description: "Returns one marketplace detail when the caller can view it.", + responses: { + 200: jsonResponse("Marketplace returned successfully.", marketplaceDetailResponseSchema), + 400: jsonResponse("The marketplace path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view marketplaces.", unauthorizedSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ item: await getMarketplaceDetail(actorContext(c), params.marketplaceId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "patch", pluginArchRoutePaths.marketplace, + paramValidator(marketplaceParamsSchema), + jsonValidator(marketplaceUpdateSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Update marketplace", + description: "Updates marketplace metadata.", + responses: { + 200: jsonResponse("Marketplace updated successfully.", marketplaceMutationResponseSchema), + 400: jsonResponse("The marketplace update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update marketplaces.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this marketplace.", forbiddenSchema), + 404: 
jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await updateMarketplace({ context: actorContext(c), description: body.description, marketplaceId: params.marketplaceId, name: body.name }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + for (const [path, action] of [[pluginArchRoutePaths.marketplaceArchive, "archive"], [pluginArchRoutePaths.marketplaceRestore, "restore"]] as const) { + withPluginArchOrgContext(app, "post", path, + paramValidator(marketplaceParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: `${action} marketplace`, + description: `${action} a marketplace without touching membership history.`, + responses: { + 200: jsonResponse("Marketplace lifecycle updated successfully.", marketplaceMutationResponseSchema), + 400: jsonResponse("The marketplace lifecycle path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplaces.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage this marketplace.", forbiddenSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await setMarketplaceLifecycle({ action, context: actorContext(c), marketplaceId: params.marketplaceId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + } + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.marketplacePlugins, + paramValidator(marketplaceParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "List marketplace plugins", + description: "Lists marketplace memberships and resolved plugin projections.", + responses: { + 200: jsonResponse("Marketplace memberships returned successfully.", 
marketplacePluginListResponseSchema), + 400: jsonResponse("The marketplace membership path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view marketplace memberships.", unauthorizedSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listMarketplaceMemberships({ context: actorContext(c), includePlugins: true, marketplaceId: params.marketplaceId, onlyActive: false })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.marketplaceResolved, + paramValidator(marketplaceParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Get marketplace resolved", + description: "Returns marketplace detail with plugins and derived source info.", + responses: { + 200: jsonResponse("Marketplace resolved detail returned successfully.", marketplaceResolvedResponseSchema), + 400: jsonResponse("The marketplace path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view marketplaces.", unauthorizedSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await getMarketplaceResolved({ context: actorContext(c), marketplaceId: params.marketplaceId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.marketplacePlugins, + paramValidator(marketplaceParamsSchema), + jsonValidator(marketplacePluginWriteSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Add marketplace plugin", + description: "Adds a plugin to a marketplace.", + responses: { + 201: jsonResponse("Marketplace membership created successfully.", 
marketplacePluginMutationResponseSchema), + 400: jsonResponse("The marketplace membership request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplace memberships.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this marketplace.", forbiddenSchema), + 404: jsonResponse("The marketplace or plugin could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await attachPluginToMarketplace({ context: actorContext(c), marketplaceId: params.marketplaceId, membershipSource: body.membershipSource, pluginId: body.pluginId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.marketplacePlugin, + paramValidator(marketplacePluginParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Remove marketplace plugin", + description: "Removes one plugin from a marketplace.", + responses: { + 204: emptyResponse("Marketplace membership removed successfully."), + 400: jsonResponse("The marketplace membership path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplace memberships.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this marketplace.", forbiddenSchema), + 404: jsonResponse("The marketplace membership could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await removePluginFromMarketplace({ context: actorContext(c), marketplaceId: params.marketplaceId, pluginId: params.pluginId }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.marketplaceAccess, + paramValidator(marketplaceParamsSchema), + 
describeRoute({ + tags: ["Marketplaces"], + summary: "List marketplace access grants", + description: "Lists direct, team, and org-wide grants for a marketplace.", + responses: { + 200: jsonResponse("Marketplace access grants returned successfully.", accessGrantListResponseSchema), + 400: jsonResponse("The marketplace access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplace access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage marketplace access.", forbiddenSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listResourceAccess({ context: actorContext(c), resourceId: params.marketplaceId, resourceKind: "marketplace" })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.marketplaceAccess, + paramValidator(marketplaceParamsSchema), + jsonValidator(resourceAccessGrantWriteSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Grant marketplace access", + description: "Creates or reactivates one access grant for a marketplace.", + responses: { + 201: jsonResponse("Marketplace access grant created successfully.", accessGrantMutationResponseSchema), + 400: jsonResponse("The marketplace access request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplace access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage marketplace access.", forbiddenSchema), + 404: jsonResponse("The marketplace could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await createResourceAccessGrant({ context: actorContext(c), resourceId: params.marketplaceId, resourceKind: "marketplace", 
value: validJson(c) }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.marketplaceAccessGrant, + paramValidator(marketplaceAccessGrantParamsSchema), + describeRoute({ + tags: ["Marketplaces"], + summary: "Revoke marketplace access", + description: "Soft-revokes one marketplace access grant.", + responses: { + 204: emptyResponse("Marketplace access revoked successfully."), + 400: jsonResponse("The marketplace access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage marketplace access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage marketplace access.", forbiddenSchema), + 404: jsonResponse("The access grant could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await deleteResourceAccessGrant({ context: actorContext(c), grantId: params.grantId, resourceId: params.marketplaceId, resourceKind: "marketplace" }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorAccounts, + queryValidator(connectorAccountListQuerySchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector accounts", + description: "Lists connector accounts for the organization.", + responses: { + 200: jsonResponse("Connector accounts returned successfully.", connectorAccountListResponseSchema), + 400: jsonResponse("The connector account query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list connector accounts.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listConnectorAccounts({ connectorType: query.connectorType, context: actorContext(c), cursor: query.cursor, limit: query.limit, q: query.q, 
status: query.status })) + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorAccounts, + jsonValidator(connectorAccountCreateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Create connector account", + description: "Creates a connector account such as a GitHub App installation binding.", + responses: { + 201: jsonResponse("Connector account created successfully.", connectorAccountMutationResponseSchema), + 400: jsonResponse("The connector account creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create connector accounts.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to create connector accounts.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_account.create") + const body = validJson(c) + return c.json({ ok: true, item: await createConnectorAccount({ connectorType: body.connectorType, context, displayName: body.displayName, externalAccountRef: body.externalAccountRef, metadata: body.metadata, remoteId: body.remoteId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorAccount, + paramValidator(connectorAccountParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Get connector account", + description: "Returns one connector account detail.", + responses: { + 200: jsonResponse("Connector account returned successfully.", connectorAccountDetailResponseSchema), + 400: jsonResponse("The connector account path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view connector accounts.", unauthorizedSchema), + 404: jsonResponse("The connector account could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ item: await 
getConnectorAccountDetail(actorContext(c), validParam(c).connectorAccountId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorAccountDisconnect, + paramValidator(connectorAccountParamsSchema), + jsonValidator(connectorAccountDisconnectSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Disconnect connector account", + description: "Disconnects a connector account and cleans up all associated connector-managed records.", + responses: { + 200: jsonResponse("Connector account disconnected and cleaned up successfully.", connectorAccountDisconnectResponseSchema), + 400: jsonResponse("The connector account disconnect request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage connector accounts.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage connector accounts.", forbiddenSchema), + 404: jsonResponse("The connector account could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_account.create") + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await disconnectConnectorAccount({ connectorAccountId: params.connectorAccountId, context, reason: body?.reason }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstances, + queryValidator(connectorInstanceListQuerySchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector instances", + description: "Lists connector instances visible to the current member.", + responses: { + 200: jsonResponse("Connector instances returned successfully.", connectorInstanceListResponseSchema), + 400: jsonResponse("The connector instance query parameters were invalid.", invalidRequestSchema), + 
401: jsonResponse("The caller must be signed in to list connector instances.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listConnectorInstances({ connectorAccountId: query.connectorAccountId, context: actorContext(c), cursor: query.cursor, limit: query.limit, pluginId: query.pluginId, q: query.q, status: query.status })) + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorInstances, + jsonValidator(connectorInstanceCreateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Create connector instance", + description: "Creates a new connector instance.", + responses: { + 201: jsonResponse("Connector instance created successfully.", connectorInstanceMutationResponseSchema), + 400: jsonResponse("The connector instance creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create connector instances.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to create connector instances.", forbiddenSchema), + 404: jsonResponse("The connector account could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_instance.create") + const body = validJson(c) + return c.json({ ok: true, item: await createConnectorInstance({ connectorAccountId: body.connectorAccountId, connectorType: body.connectorType, config: body.config, context, name: body.name, remoteId: body.remoteId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstance, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Get connector instance", + description: "Returns one connector instance detail.", + responses: { + 200: jsonResponse("Connector instance returned successfully.", 
connectorInstanceDetailResponseSchema), + 400: jsonResponse("The connector instance path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view connector instances.", unauthorizedSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ item: await getConnectorInstanceDetail(actorContext(c), validParam(c).connectorInstanceId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "patch", pluginArchRoutePaths.connectorInstance, + paramValidator(connectorInstanceParamsSchema), + jsonValidator(connectorInstanceUpdateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Update connector instance", + description: "Updates one connector instance.", + responses: { + 200: jsonResponse("Connector instance updated successfully.", connectorInstanceMutationResponseSchema), + 400: jsonResponse("The connector instance update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update connector instances.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await updateConnectorInstance({ connectorInstanceId: params.connectorInstanceId, config: body.config, context: actorContext(c), name: body.name, remoteId: body.remoteId, status: body.status }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + for (const [path, action] of [[pluginArchRoutePaths.connectorInstanceArchive, "archive"], [pluginArchRoutePaths.connectorInstanceDisable, "disable"], [pluginArchRoutePaths.connectorInstanceEnable, "enable"]] as const) { + 
withPluginArchOrgContext(app, "post", path, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: `${action} connector instance`, + description: `${action} a connector instance.`, + responses: { + 200: jsonResponse("Connector instance updated successfully.", connectorInstanceMutationResponseSchema), + 400: jsonResponse("The connector instance path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage connector instances.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await setConnectorInstanceLifecycle({ action, connectorInstanceId: params.connectorInstanceId, context: actorContext(c) }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + } + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstanceConfiguration, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Get connector instance configuration", + description: "Returns the currently configured plugins and import stats for a connector instance.", + responses: { + 200: jsonResponse("Connector instance configuration returned successfully.", connectorInstanceConfigurationResponseSchema), + 400: jsonResponse("The connector instance path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to inspect connector instances.", unauthorizedSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ ok: true, item: await getConnectorInstanceConfiguration({ connectorInstanceId: validParam(c).connectorInstanceId, 
context: actorContext(c) }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorInstanceRemove, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Remove connector instance", + description: "Removes a connector instance and deletes the plugins, mappings, config objects, and bindings associated with it.", + responses: { + 200: jsonResponse("Connector instance removed and cleaned up successfully.", connectorInstanceRemoveResponseSchema), + 400: jsonResponse("The connector instance path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to remove connector instances.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to remove this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + return c.json({ ok: true, item: await removeConnectorInstance({ connectorInstanceId: validParam(c).connectorInstanceId, context }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorInstanceAutoImport, + paramValidator(connectorInstanceParamsSchema), + jsonValidator(connectorInstanceAutoImportSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Set connector instance auto-import", + description: "Enables or disables auto-import of new plugins on future push webhooks for a connector instance.", + responses: { + 200: jsonResponse("Connector instance auto-import updated successfully.", connectorInstanceConfigurationResponseSchema), + 400: jsonResponse("The auto-import request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to configure connector instances.", unauthorizedSchema), + 403: 
jsonResponse("The caller lacks permission to configure this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await setConnectorInstanceAutoImport({ autoImportNewPlugins: Boolean(body.autoImportNewPlugins), connectorInstanceId: params.connectorInstanceId, context }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstanceDiscovery, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Get GitHub connector discovery", + description: "Analyzes a GitHub connector target and returns discovered plugin candidates.", + responses: { + 200: jsonResponse("GitHub connector discovery returned successfully.", githubConnectorDiscoveryResponseSchema), + 400: jsonResponse("The connector instance path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to inspect GitHub discovery.", unauthorizedSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ ok: true, item: await getGithubConnectorDiscovery({ connectorInstanceId: validParam(c).connectorInstanceId, context: actorContext(c) }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstanceDiscoveryTree, + paramValidator(connectorInstanceParamsSchema), + queryValidator(githubDiscoveryTreeQuerySchema), + describeRoute({ + tags: ["GitHub"], + summary: "List GitHub discovery tree entries", + description: "Pages through the normalized GitHub repository tree used during discovery.", + responses: { + 200: 
jsonResponse("GitHub discovery tree returned successfully.", githubDiscoveryTreeResponseSchema), + 400: jsonResponse("The discovery tree request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to inspect GitHub discovery tree entries.", unauthorizedSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const query = validQuery(c) + return c.json(await getGithubConnectorDiscoveryTree({ connectorInstanceId: params.connectorInstanceId, context: actorContext(c), cursor: query.cursor, limit: query.limit, prefix: query.prefix })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorInstanceDiscoveryApply, + paramValidator(connectorInstanceParamsSchema), + jsonValidator(githubDiscoveryApplySchema), + describeRoute({ + tags: ["GitHub"], + summary: "Apply GitHub discovery selection", + description: "Creates OpenWork plugins and connector mappings from selected discovery candidates.", + responses: { + 200: jsonResponse("GitHub discovery selection applied successfully.", githubDiscoveryApplyResponseSchema), + 400: jsonResponse("The discovery apply request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to apply discovery selections.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + const context = actorContext(c) + if (Array.isArray(body.selectedKeys) && body.selectedKeys.length > 0) { + await requirePluginArchCapability(context, "plugin.create") + } + return c.json({ ok: true, item: await applyGithubConnectorDiscovery({ 
autoImportNewPlugins: Boolean(body.autoImportNewPlugins), connectorInstanceId: params.connectorInstanceId, context, selectedKeys: body.selectedKeys }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorInstanceAccess, + paramValidator(connectorInstanceParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector instance access grants", + description: "Lists direct, team, and org-wide grants for a connector instance.", + responses: { + 200: jsonResponse("Connector instance access grants returned successfully.", accessGrantListResponseSchema), + 400: jsonResponse("The connector instance access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage connector instance access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage connector instance access.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json(await listResourceAccess({ context: actorContext(c), resourceId: params.connectorInstanceId, resourceKind: "connector_instance" })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorInstanceAccess, + paramValidator(connectorInstanceParamsSchema), + jsonValidator(resourceAccessGrantWriteSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Grant connector instance access", + description: "Creates or reactivates one access grant for a connector instance.", + responses: { + 201: jsonResponse("Connector instance access grant created successfully.", accessGrantMutationResponseSchema), + 400: jsonResponse("The connector instance access request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to 
manage connector instance access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage connector instance access.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + return c.json({ ok: true, item: await createResourceAccessGrant({ context: actorContext(c), resourceId: params.connectorInstanceId, resourceKind: "connector_instance", value: validJson(c) }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.connectorInstanceAccessGrant, + paramValidator(connectorInstanceAccessGrantParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Revoke connector instance access", + description: "Soft-revokes one connector instance access grant.", + responses: { + 204: emptyResponse("Connector instance access revoked successfully."), + 400: jsonResponse("The connector instance access path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage connector instance access.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to manage connector instance access.", forbiddenSchema), + 404: jsonResponse("The access grant could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + await deleteResourceAccessGrant({ context: actorContext(c), grantId: params.grantId, resourceId: params.connectorInstanceId, resourceKind: "connector_instance" }) + return c.body(null, 204) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorTargets, + paramValidator(connectorInstanceParamsSchema), + queryValidator(connectorTargetListQuerySchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector targets", + 
description: "Lists connector targets for one connector instance.", + responses: { + 200: jsonResponse("Connector targets returned successfully.", connectorTargetListResponseSchema), + 400: jsonResponse("The connector target query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list connector targets.", unauthorizedSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const query = validQuery(c) + return c.json(await listConnectorTargets({ connectorInstanceId: params.connectorInstanceId, context: actorContext(c), cursor: query.cursor, limit: query.limit, q: query.q, targetKind: query.targetKind })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorTargets, + paramValidator(connectorInstanceParamsSchema), + jsonValidator(connectorTargetCreateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Create connector target", + description: "Creates a connector target under a connector instance.", + responses: { + 201: jsonResponse("Connector target created successfully.", connectorTargetMutationResponseSchema), + 400: jsonResponse("The connector target creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create connector targets.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector instance could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await createConnectorTarget({ config: body.config, connectorInstanceId: params.connectorInstanceId, connectorType: body.connectorType, context: actorContext(c), externalTargetRef: 
body.externalTargetRef, remoteId: body.remoteId, targetKind: body.targetKind }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorTarget, + paramValidator(connectorTargetParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Get connector target", + description: "Returns one connector target detail.", + responses: { + 200: jsonResponse("Connector target returned successfully.", connectorTargetDetailResponseSchema), + 400: jsonResponse("The connector target path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view connector targets.", unauthorizedSchema), + 404: jsonResponse("The connector target could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ item: await getConnectorTargetDetail(actorContext(c), validParam(c).connectorTargetId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "patch", pluginArchRoutePaths.connectorTarget, + paramValidator(connectorTargetParamsSchema), + jsonValidator(connectorTargetUpdateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Update connector target", + description: "Updates one connector target.", + responses: { + 200: jsonResponse("Connector target updated successfully.", connectorTargetMutationResponseSchema), + 400: jsonResponse("The connector target update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update connector targets.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector target could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await updateConnectorTarget({ 
config: body.config, connectorTargetId: params.connectorTargetId, context: actorContext(c), externalTargetRef: body.externalTargetRef, remoteId: body.remoteId }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorTargetResync, + paramValidator(connectorTargetParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Resync connector target", + description: "Queues a manual resync for a connector target.", + responses: { + 202: jsonResponse("Connector target resync queued successfully.", connectorSyncAsyncResponseSchema), + 400: jsonResponse("The connector target path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to resync connector targets.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector target could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const job = await queueConnectorTargetResync({ connectorTargetId: validParam(c).connectorTargetId, context: actorContext(c) }) + return c.json({ ok: true, queued: true, job }, 202) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorTargetMappings, + paramValidator(connectorTargetParamsSchema), + queryValidator(connectorMappingListQuerySchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector mappings", + description: "Lists mappings under a connector target.", + responses: { + 200: jsonResponse("Connector mappings returned successfully.", connectorMappingListResponseSchema), + 400: jsonResponse("The connector mapping query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list connector mappings.", unauthorizedSchema), + 404: jsonResponse("The connector target could 
not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const query = validQuery(c) + return c.json(await listConnectorMappings({ connectorTargetId: params.connectorTargetId, context: actorContext(c), cursor: query.cursor, limit: query.limit, mappingKind: query.mappingKind, objectType: query.objectType, pluginId: query.pluginId, q: query.q })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.connectorTargetMappings, + paramValidator(connectorTargetParamsSchema), + jsonValidator(connectorMappingCreateSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Create connector mapping", + description: "Creates a connector mapping.", + responses: { + 201: jsonResponse("Connector mapping created successfully.", connectorMappingMutationResponseSchema), + 400: jsonResponse("The connector mapping creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create connector mappings.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance or target plugin.", forbiddenSchema), + 404: jsonResponse("The connector target could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await createConnectorMapping({ autoAddToPlugin: body.autoAddToPlugin, config: body.config, connectorTargetId: params.connectorTargetId, context: actorContext(c), mappingKind: body.mappingKind, objectType: body.objectType, pluginId: body.pluginId, selector: body.selector }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "patch", pluginArchRoutePaths.connectorMapping, + paramValidator(connectorMappingParamsSchema), + jsonValidator(connectorMappingUpdateSchema), + describeRoute({ + tags: 
["Connectors"], + summary: "Update connector mapping", + description: "Updates one connector mapping.", + responses: { + 200: jsonResponse("Connector mapping updated successfully.", connectorMappingMutationResponseSchema), + 400: jsonResponse("The connector mapping update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update connector mappings.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance or target plugin.", forbiddenSchema), + 404: jsonResponse("The connector mapping could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const body = validJson(c) + return c.json({ ok: true, item: await updateConnectorMapping({ autoAddToPlugin: body.autoAddToPlugin, config: body.config, connectorMappingId: params.connectorMappingId, context: actorContext(c), objectType: body.objectType, pluginId: body.pluginId, selector: body.selector }) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "delete", pluginArchRoutePaths.connectorMapping, + paramValidator(connectorMappingParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Delete connector mapping", + description: "Deletes one connector mapping.", + responses: { + 204: emptyResponse("Connector mapping deleted successfully."), + 400: jsonResponse("The connector mapping path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete connector mappings.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector mapping could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + await deleteConnectorMapping({ connectorMappingId: validParam(c).connectorMappingId, context: actorContext(c) }) + return c.body(null, 204) + } 
catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorSyncEvents, + queryValidator(connectorSyncEventListQuerySchema), + describeRoute({ + tags: ["Connectors"], + summary: "List connector sync events", + description: "Lists connector sync events visible to the current member.", + responses: { + 200: jsonResponse("Connector sync events returned successfully.", connectorSyncEventListResponseSchema), + 400: jsonResponse("The connector sync event query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list connector sync events.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const query = validQuery(c) + return c.json(await listConnectorSyncEvents({ connectorInstanceId: query.connectorInstanceId, connectorTargetId: query.connectorTargetId, context: actorContext(c), cursor: query.cursor, eventType: query.eventType, limit: query.limit, q: query.q, status: query.status })) + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.connectorSyncEvent, + paramValidator(connectorSyncEventParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Get connector sync event", + description: "Returns one connector sync event detail.", + responses: { + 200: jsonResponse("Connector sync event returned successfully.", connectorSyncEventDetailResponseSchema), + 400: jsonResponse("The connector sync event path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to view connector sync events.", unauthorizedSchema), + 404: jsonResponse("The connector sync event could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + return c.json({ item: await getConnectorSyncEventDetail(actorContext(c), validParam(c).connectorSyncEventId) }) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", 
pluginArchRoutePaths.connectorSyncEventRetry, + paramValidator(connectorSyncEventParamsSchema), + describeRoute({ + tags: ["Connectors"], + summary: "Retry connector sync event", + description: "Re-queues one connector sync event.", + responses: { + 202: jsonResponse("Connector sync event retried successfully.", connectorSyncAsyncResponseSchema), + 400: jsonResponse("The connector sync event path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to retry connector sync events.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to edit this connector instance.", forbiddenSchema), + 404: jsonResponse("The connector sync event could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const job = await retryConnectorSyncEvent({ connectorSyncEventId: validParam(c).connectorSyncEventId, context: actorContext(c) }) + return c.json({ ok: true, queued: true, job }, 202) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.githubAccounts, + jsonValidator(githubConnectorAccountCreateSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Create GitHub connector account", + description: "Persists one GitHub App installation as a connector account.", + responses: { + 201: jsonResponse("GitHub connector account created successfully.", connectorAccountMutationResponseSchema), + 400: jsonResponse("The GitHub account creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create GitHub connector accounts.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to create GitHub connector accounts.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_account.create") + const body = validJson(c) + return c.json({ ok: true, item: await 
createGithubConnectorAccount({ accountLogin: body.accountLogin, accountType: body.accountType, context, displayName: body.displayName, installationId: body.installationId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.githubSetup, + jsonValidator(githubConnectorSetupSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Setup GitHub connector", + description: "Creates a GitHub connector account, instance, target, and initial mappings in one flow.", + responses: { + 201: jsonResponse("GitHub connector setup created successfully.", githubSetupResponseSchema), + 400: jsonResponse("The GitHub setup request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to setup GitHub connectors.", unauthorizedSchema), + 403: jsonResponse("The caller lacks permission to setup GitHub connectors.", forbiddenSchema), + }, + }), + async (c: OrgContext) => { + try { + const context = actorContext(c) + await requirePluginArchCapability(context, "connector_instance.create") + const body = validJson(c) + return c.json({ ok: true, item: await githubSetup({ branch: body.branch, connectorAccountId: body.connectorAccountId, connectorInstanceName: body.connectorInstanceName, context, installationId: body.installationId, mappings: body.mappings, ref: body.ref, repositoryFullName: body.repositoryFullName, repositoryId: body.repositoryId }) }, 201) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "get", pluginArchRoutePaths.githubAccountRepositories, + paramValidator(connectorAccountRepositoryParamsSchema), + queryValidator(githubRepositoryListQuerySchema), + describeRoute({ + tags: ["GitHub"], + summary: "List GitHub repositories", + description: "Lists repositories visible to one GitHub connector account.", + responses: { + 200: jsonResponse("GitHub repositories returned successfully.", 
githubRepositoryListResponseSchema), + 400: jsonResponse("The GitHub repository query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list GitHub repositories.", unauthorizedSchema), + 404: jsonResponse("The connector account could not be found.", notFoundSchema), + }, + }), + async (c: OrgContext) => { + try { + const params = validParam(c) + const query = validQuery(c) + return c.json(await listGithubRepositories({ connectorAccountId: params.connectorAccountId, context: actorContext(c), cursor: query.cursor, limit: query.limit, q: query.q })) + } catch (error) { + return routeErrorResponse(c, error) + } + }) + + withPluginArchOrgContext(app, "post", pluginArchRoutePaths.githubValidateTarget, + jsonValidator(githubValidateTargetSchema), + describeRoute({ + tags: ["GitHub"], + summary: "Validate GitHub target", + description: "Validates one repository-branch target before persisting it.", + responses: { + 200: jsonResponse("GitHub target validated successfully.", githubValidateTargetResponseSchema), + 400: jsonResponse("The GitHub target validation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to validate GitHub targets.", unauthorizedSchema), + }, + }), + async (c: OrgContext) => { + const body = validJson(c) + return c.json({ ok: true, item: await validateGithubTarget({ branch: body.branch, installationId: body.installationId, ref: body.ref, repositoryFullName: body.repositoryFullName, repositoryId: body.repositoryId }) }) + }) +} diff --git a/ee/apps/den-api/src/routes/org/plugin-system/schemas.ts b/ee/apps/den-api/src/routes/org/plugin-system/schemas.ts new file mode 100644 index 0000000000..d87e7e5bda --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/schemas.ts @@ -0,0 +1,882 @@ +import { + accessRoleValues, + configObjectCreatedViaValues, + configObjectSourceModeValues, + configObjectStatusValues, + configObjectTypeValues, + 
connectorAccountStatusValues, + connectorInstanceStatusValues, + connectorMappingKindValues, + connectorSyncEventTypeValues, + connectorSyncStatusValues, + connectorTargetKindValues, + connectorTypeValues, + marketplaceStatusValues, + membershipSourceValues, + pluginStatusValues, +} from "@openwork-ee/den-db/schema" +import { z } from "zod" +import { denTypeIdSchema } from "../../../openapi.js" +import { idParamSchema } from "../shared.js" + +const cursorSchema = z.string().trim().min(1).max(255) +const jsonObjectSchema = z.object({}).passthrough() +const rawSourceTextSchema = z.string().trim().min(1) +const nullableStringSchema = z.string().trim().min(1).nullable() +const nullableTimestampSchema = z.string().datetime({ offset: true }).nullable() +const queryBooleanSchema = z.enum(["true", "false"]).transform((value) => value === "true") + +export const githubWebhookEventValues = ["push", "installation", "installation_repositories", "repository"] as const + +export const configObjectIdSchema = denTypeIdSchema("configObject") +export const configObjectVersionIdSchema = denTypeIdSchema("configObjectVersion") +export const configObjectAccessGrantIdSchema = denTypeIdSchema("configObjectAccessGrant") +export const pluginIdSchema = denTypeIdSchema("plugin") +export const pluginConfigObjectIdSchema = denTypeIdSchema("pluginConfigObject") +export const pluginAccessGrantIdSchema = denTypeIdSchema("pluginAccessGrant") +export const marketplaceIdSchema = denTypeIdSchema("marketplace") +export const marketplacePluginIdSchema = denTypeIdSchema("marketplacePlugin") +export const marketplaceAccessGrantIdSchema = denTypeIdSchema("marketplaceAccessGrant") +export const connectorAccountIdSchema = denTypeIdSchema("connectorAccount") +export const connectorInstanceIdSchema = denTypeIdSchema("connectorInstance") +export const connectorInstanceAccessGrantIdSchema = denTypeIdSchema("connectorInstanceAccessGrant") +export const connectorTargetIdSchema = denTypeIdSchema("connectorTarget") 
export const connectorMappingIdSchema = denTypeIdSchema("connectorMapping")
export const connectorSyncEventIdSchema = denTypeIdSchema("connectorSyncEvent")
export const connectorSourceBindingIdSchema = denTypeIdSchema("connectorSourceBinding")
export const connectorSourceTombstoneIdSchema = denTypeIdSchema("connectorSourceTombstone")
export const memberIdSchema = denTypeIdSchema("member")
export const teamIdSchema = denTypeIdSchema("team")

// --- Enum schemas derived from the db-level value lists ---

export const configObjectTypeSchema = z.enum(configObjectTypeValues)
export const configObjectSourceModeSchema = z.enum(configObjectSourceModeValues)
export const configObjectCreatedViaSchema = z.enum(configObjectCreatedViaValues)
export const configObjectStatusSchema = z.enum(configObjectStatusValues)
export const pluginStatusSchema = z.enum(pluginStatusValues)
export const marketplaceStatusSchema = z.enum(marketplaceStatusValues)
export const membershipSourceSchema = z.enum(membershipSourceValues)
export const accessRoleSchema = z.enum(accessRoleValues)
export const connectorTypeSchema = z.enum(connectorTypeValues)
export const connectorAccountStatusSchema = z.enum(connectorAccountStatusValues)
export const connectorInstanceStatusSchema = z.enum(connectorInstanceStatusValues)
export const connectorTargetKindSchema = z.enum(connectorTargetKindValues)
export const connectorMappingKindSchema = z.enum(connectorMappingKindValues)
export const connectorSyncStatusSchema = z.enum(connectorSyncStatusValues)
export const connectorSyncEventTypeSchema = z.enum(connectorSyncEventTypeValues)
export const githubWebhookEventSchema = z.enum(githubWebhookEventValues)

// --- List-endpoint query schemas (cursor pagination + per-resource filters) ---

// Base pagination query; `limit` is coerced from the query string (1..100).
export const pluginArchPaginationQuerySchema = z.object({
  cursor: cursorSchema.optional(),
  limit: z.coerce.number().int().min(1).max(100).optional(),
})

export const configObjectListQuerySchema = pluginArchPaginationQuerySchema.extend({
  type: configObjectTypeSchema.optional(),
  status: configObjectStatusSchema.optional(),
  sourceMode: configObjectSourceModeSchema.optional(),
  pluginId: pluginIdSchema.optional(),
  connectorInstanceId: connectorInstanceIdSchema.optional(),
  includeDeleted: queryBooleanSchema.optional(),
  // Free-text search term.
  q: z.string().trim().min(1).max(255).optional(),
})

export const configObjectVersionListQuerySchema = pluginArchPaginationQuerySchema.extend({
  includeDeleted: queryBooleanSchema.optional(),
})

export const pluginListQuerySchema = pluginArchPaginationQuerySchema.extend({
  status: pluginStatusSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const marketplaceListQuerySchema = pluginArchPaginationQuerySchema.extend({
  status: marketplaceStatusSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const connectorAccountListQuerySchema = pluginArchPaginationQuerySchema.extend({
  connectorType: connectorTypeSchema.optional(),
  status: connectorAccountStatusSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const connectorInstanceListQuerySchema = pluginArchPaginationQuerySchema.extend({
  connectorAccountId: connectorAccountIdSchema.optional(),
  connectorType: connectorTypeSchema.optional(),
  pluginId: pluginIdSchema.optional(),
  status: connectorInstanceStatusSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const connectorTargetListQuerySchema = pluginArchPaginationQuerySchema.extend({
  targetKind: connectorTargetKindSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const connectorMappingListQuerySchema = pluginArchPaginationQuerySchema.extend({
  mappingKind: connectorMappingKindSchema.optional(),
  objectType: configObjectTypeSchema.optional(),
  pluginId: pluginIdSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const connectorSyncEventListQuerySchema = pluginArchPaginationQuerySchema.extend({
  connectorInstanceId: connectorInstanceIdSchema.optional(),
  connectorTargetId: connectorTargetIdSchema.optional(),
  eventType: connectorSyncEventTypeSchema.optional(),
  status: connectorSyncStatusSchema.optional(),
  q: z.string().trim().min(1).max(255).optional(),
})

export const githubRepositoryListQuerySchema = pluginArchPaginationQuerySchema.extend({
  q: z.string().trim().min(1).max(255).optional(),
})

// --- Path-parameter schemas; nested resources extend the parent's param shape ---

export const configObjectParamsSchema = idParamSchema("configObjectId", "configObject")
export const configObjectVersionParamsSchema = configObjectParamsSchema.extend(idParamSchema("versionId", "configObjectVersion").shape)
export const configObjectAccessGrantParamsSchema = configObjectParamsSchema.extend(idParamSchema("grantId", "configObjectAccessGrant").shape)
export const pluginParamsSchema = idParamSchema("pluginId", "plugin")
export const pluginConfigObjectParamsSchema = pluginParamsSchema.extend(idParamSchema("configObjectId", "configObject").shape)
export const pluginAccessGrantParamsSchema = pluginParamsSchema.extend(idParamSchema("grantId", "pluginAccessGrant").shape)
export const marketplaceParamsSchema = idParamSchema("marketplaceId", "marketplace")
export const marketplacePluginParamsSchema = marketplaceParamsSchema.extend(idParamSchema("pluginId", "plugin").shape)
export const marketplaceAccessGrantParamsSchema = marketplaceParamsSchema.extend(idParamSchema("grantId", "marketplaceAccessGrant").shape)
export const connectorAccountParamsSchema = idParamSchema("connectorAccountId", "connectorAccount")
export const connectorInstanceParamsSchema = idParamSchema("connectorInstanceId", "connectorInstance")
export const connectorInstanceAccessGrantParamsSchema = connectorInstanceParamsSchema.extend(idParamSchema("grantId", "connectorInstanceAccessGrant").shape)
export const connectorTargetParamsSchema = idParamSchema("connectorTargetId", "connectorTarget")
export const connectorMappingParamsSchema = idParamSchema("connectorMappingId", "connectorMapping")
export const connectorSyncEventParamsSchema = idParamSchema("connectorSyncEventId", "connectorSyncEvent")

// Repository sub-resource uses the same params as its connector account.
export const connectorAccountRepositoryParamsSchema = connectorAccountParamsSchema

// --- Request-body schemas ---

// Content payload for a config object; at least one of the two content
// representations (raw text or normalized JSON) must be present.
export const configObjectInputSchema = z.object({
  rawSourceText: rawSourceTextSchema.optional(),
  normalizedPayloadJson: jsonObjectSchema.optional(),
  parserMode: z.string().trim().min(1).max(100).optional(),
  schemaVersion: z.string().trim().min(1).max(100).optional(),
  metadata: jsonObjectSchema.optional(),
}).superRefine((value, ctx) => {
  if (!value.rawSourceText && !value.normalizedPayloadJson) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide either rawSourceText or normalizedPayloadJson.",
      path: ["rawSourceText"],
    })
  }
})

export const configObjectCreateSchema = z.object({
  type: configObjectTypeSchema,
  sourceMode: configObjectSourceModeSchema,
  pluginIds: z.array(pluginIdSchema).max(100).optional(),
  input: configObjectInputSchema,
})

export const configObjectCreateVersionSchema = z.object({
  input: configObjectInputSchema,
  reason: z.string().trim().min(1).max(255).optional(),
})

export const configObjectPluginAttachSchema = z.object({
  pluginId: pluginIdSchema,
  membershipSource: membershipSourceSchema.optional(),
})

// Access-grant write: exactly one subject must be specified (a member, a team,
// or org-wide). Note orgWide defaults to false, which counts as "not chosen".
export const resourceAccessGrantWriteSchema = z.object({
  orgMembershipId: memberIdSchema.optional(),
  teamId: teamIdSchema.optional(),
  orgWide: z.boolean().optional().default(false),
  role: accessRoleSchema,
}).superRefine((value, ctx) => {
  // XOR over the three subject choices: truthy values count as chosen.
  const count = Number(Boolean(value.orgMembershipId)) + Number(Boolean(value.teamId)) + Number(Boolean(value.orgWide))
  if (count !== 1) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide exactly one of orgMembershipId, teamId, or orgWide=true.",
      path: ["orgMembershipId"],
    })
  }
})

export const pluginCreateSchema = z.object({
  name: z.string().trim().min(1).max(255),
  description: nullableStringSchema.optional(),
})

// Partial update: rejects an empty patch.
export const pluginUpdateSchema = z.object({
  name: z.string().trim().min(1).max(255).optional(),
  description: nullableStringSchema.optional(),
}).superRefine((value, ctx) => {
  if (value.name === undefined && value.description === undefined) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide at least one field to update.",
      path: ["name"],
    })
  }
})

export const marketplaceCreateSchema = z.object({
  name: z.string().trim().min(1).max(255),
  description: nullableStringSchema.optional(),
})

// Partial update: rejects an empty patch (mirrors pluginUpdateSchema).
export const marketplaceUpdateSchema = z.object({
  name: z.string().trim().min(1).max(255).optional(),
  description: nullableStringSchema.optional(),
}).superRefine((value, ctx) => {
  if (value.name === undefined && value.description === undefined) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide at least one field to update.",
      path: ["name"],
    })
  }
})

export const pluginMembershipWriteSchema = z.object({
  configObjectId: configObjectIdSchema,
  membershipSource: membershipSourceSchema.optional(),
})

export const marketplacePluginWriteSchema = z.object({
  pluginId: pluginIdSchema,
  membershipSource: membershipSourceSchema.optional(),
})

export const connectorAccountCreateSchema = z.object({
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255),
  externalAccountRef: z.string().trim().min(1).max(255).nullable().optional(),
  displayName: z.string().trim().min(1).max(255),
  metadata: jsonObjectSchema.optional(),
})

// Whole body is optional: disconnect can be called with no payload at all.
export const connectorAccountDisconnectSchema = z.object({
  reason: z.string().trim().min(1).max(255).optional(),
}).optional()

export const connectorInstanceCreateSchema = z.object({
  connectorAccountId: connectorAccountIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable().optional(),
  name: z.string().trim().min(1).max(255),
  config: jsonObjectSchema.optional(),
})

// Partial update: rejects an empty patch.
export const connectorInstanceUpdateSchema = z.object({
  remoteId: z.string().trim().min(1).max(255).nullable().optional(),
  name: z.string().trim().min(1).max(255).optional(),
  status: connectorInstanceStatusSchema.optional(),
  config: jsonObjectSchema.optional(),
}).superRefine((value, ctx) => {
  if (value.remoteId === undefined && value.name === undefined && value.status === undefined && value.config === undefined) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide at least one field to update.",
      path: ["name"],
    })
  }
})

export const connectorTargetCreateSchema = z.object({
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255),
  targetKind: connectorTargetKindSchema,
  externalTargetRef: z.string().trim().min(1).max(255).nullable().optional(),
  config: jsonObjectSchema,
})

// Partial update: rejects an empty patch.
export const connectorTargetUpdateSchema = z.object({
  remoteId: z.string().trim().min(1).max(255).optional(),
  externalTargetRef: z.string().trim().min(1).max(255).nullable().optional(),
  config: jsonObjectSchema.optional(),
}).superRefine((value, ctx) => {
  if (value.remoteId === undefined && value.externalTargetRef === undefined && value.config === undefined) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide at least one field to update.",
      path: ["remoteId"],
    })
  }
})

export const connectorMappingCreateSchema = z.object({
  mappingKind: connectorMappingKindSchema,
  selector: z.string().trim().min(1).max(255),
  objectType: configObjectTypeSchema,
  pluginId: pluginIdSchema.nullable().optional(),
  autoAddToPlugin: z.boolean().default(false),
  config: jsonObjectSchema.optional(),
})

// Partial update: rejects an empty patch.
export const connectorMappingUpdateSchema = z.object({
  selector: z.string().trim().min(1).max(255).optional(),
  objectType: configObjectTypeSchema.optional(),
  pluginId: pluginIdSchema.nullable().optional(),
  autoAddToPlugin: z.boolean().optional(),
  config: jsonObjectSchema.optional(),
}).superRefine((value, ctx) => {
  if (
    value.selector === undefined
    && value.objectType === undefined
    && value.pluginId === undefined
    && value.autoAddToPlugin === undefined
    && value.config === undefined
  ) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Provide at least one field to update.",
      path: ["selector"],
    })
  }
})

// --- GitHub connector request schemas ---

export const githubConnectorSetupSchema = z.object({
  installationId: z.number().int().positive(),
  connectorAccountId: connectorAccountIdSchema.optional(),
  connectorInstanceName: z.string().trim().min(1).max(255),
  repositoryId: z.number().int().positive(),
  repositoryFullName: z.string().trim().min(1).max(255),
  branch: z.string().trim().min(1).max(255),
  ref: z.string().trim().min(1).max(255),
  mappings: z.array(connectorMappingCreateSchema).max(100).default([]),
})

export const githubInstallStartSchema = z.object({
  // Path to return to after the GitHub install flow completes.
  returnPath: z.string().trim().min(1).max(1024),
})

export const githubInstallCompleteSchema = z.object({
  installationId: z.number().int().positive(),
  // Opaque state token round-tripped through the install flow.
  state: z.string().trim().min(1).max(4096),
})

export const githubDiscoveryApplySchema = z.object({
  autoImportNewPlugins: z.boolean().default(false),
  // Keys of discovered plugins the caller chose to import.
  selectedKeys: z.array(z.string().trim().min(1).max(255)).max(200),
})

export const githubDiscoveryTreeQuerySchema = z.object({
  cursor: z.string().trim().min(1).max(255).optional(),
  limit: z.coerce.number().int().positive().max(500).optional(),
  prefix: z.string().trim().min(1).max(1024).optional(),
})

export const githubConnectorAccountCreateSchema = z.object({
  installationId: z.number().int().positive(),
  accountLogin: z.string().trim().min(1).max(255),
  accountType: z.enum(["Organization", "User"]),
  displayName: z.string().trim().min(1).max(255),
})

export const githubValidateTargetSchema = z.object({
  installationId: z.number().int().positive(),
  repositoryId: z.number().int().positive(),
  repositoryFullName: z.string().trim().min(1).max(255),
  branch:
z.string().trim().min(1).max(255),
  ref: z.string().trim().min(1).max(255),
})

// --- Entity response schemas (each tagged with an OpenAPI ref via .meta) ---

// A single access grant; the id is a union because the same shape is reused
// for config-object, plugin, marketplace, and connector-instance grants.
export const accessGrantSchema = z.object({
  id: z.union([configObjectAccessGrantIdSchema, pluginAccessGrantIdSchema, marketplaceAccessGrantIdSchema, connectorInstanceAccessGrantIdSchema]),
  orgMembershipId: memberIdSchema.nullable(),
  teamId: teamIdSchema.nullable(),
  orgWide: z.boolean(),
  role: accessRoleSchema,
  createdByOrgMembershipId: memberIdSchema,
  createdAt: z.string().datetime({ offset: true }),
  removedAt: nullableTimestampSchema,
}).meta({ ref: "PluginArchAccessGrant" })

export const configObjectVersionSchema = z.object({
  id: configObjectVersionIdSchema,
  configObjectId: configObjectIdSchema,
  schemaVersion: z.string().trim().min(1).max(100).nullable(),
  normalizedPayloadJson: jsonObjectSchema.nullable(),
  rawSourceText: z.string().nullable(),
  createdVia: configObjectCreatedViaSchema,
  createdByOrgMembershipId: memberIdSchema.nullable(),
  // Set when this version was produced by a connector sync rather than a user.
  connectorSyncEventId: connectorSyncEventIdSchema.nullable(),
  sourceRevisionRef: z.string().trim().min(1).max(255).nullable(),
  isDeletedVersion: z.boolean(),
  createdAt: z.string().datetime({ offset: true }),
}).meta({ ref: "PluginArchConfigObjectVersion" })

export const configObjectSchema = z.object({
  id: configObjectIdSchema,
  organizationId: denTypeIdSchema("organization"),
  objectType: configObjectTypeSchema,
  sourceMode: configObjectSourceModeSchema,
  title: z.string().trim().min(1).max(255),
  description: nullableStringSchema,
  searchText: z.string().trim().min(1).max(65535).nullable(),
  currentFileName: z.string().trim().min(1).max(255).nullable(),
  currentFileExtension: z.string().trim().min(1).max(32).nullable(),
  currentRelativePath: z.string().trim().min(1).max(255).nullable(),
  status: configObjectStatusSchema,
  createdByOrgMembershipId: memberIdSchema,
  connectorInstanceId: connectorInstanceIdSchema.nullable(),
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
  deletedAt: nullableTimestampSchema,
  // Denormalized latest version, when the endpoint chooses to include it.
  latestVersion: configObjectVersionSchema.nullable(),
}).meta({ ref: "PluginArchConfigObject" })

// Join row linking a config object into a plugin.
export const pluginMembershipSchema = z.object({
  id: pluginConfigObjectIdSchema,
  pluginId: pluginIdSchema,
  configObjectId: configObjectIdSchema,
  membershipSource: membershipSourceSchema,
  connectorMappingId: connectorMappingIdSchema.nullable(),
  createdByOrgMembershipId: memberIdSchema.nullable(),
  createdAt: z.string().datetime({ offset: true }),
  removedAt: nullableTimestampSchema,
  configObject: configObjectSchema.optional(),
}).meta({ ref: "PluginArchPluginMembership" })

export const pluginSchema = z.object({
  id: pluginIdSchema,
  organizationId: denTypeIdSchema("organization"),
  name: z.string().trim().min(1).max(255),
  description: nullableStringSchema,
  status: pluginStatusSchema,
  createdByOrgMembershipId: memberIdSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
  deletedAt: nullableTimestampSchema,
  memberCount: z.number().int().nonnegative().optional(),
  // Marketplaces this plugin appears in (summary form), when included.
  marketplaces: z.array(z.object({
    id: marketplaceIdSchema,
    name: z.string().trim().min(1).max(255),
  })).optional(),
}).meta({ ref: "PluginArchPlugin" })

// Join row linking a plugin into a marketplace.
export const marketplacePluginSchema = z.object({
  id: marketplacePluginIdSchema,
  marketplaceId: marketplaceIdSchema,
  pluginId: pluginIdSchema,
  membershipSource: membershipSourceSchema,
  createdByOrgMembershipId: memberIdSchema.nullable(),
  createdAt: z.string().datetime({ offset: true }),
  removedAt: nullableTimestampSchema,
  plugin: pluginSchema.optional(),
}).meta({ ref: "PluginArchMarketplacePluginMembership" })

export const marketplaceSchema = z.object({
  id: marketplaceIdSchema,
  organizationId: denTypeIdSchema("organization"),
  name: z.string().trim().min(1).max(255),
  description: nullableStringSchema,
  status: marketplaceStatusSchema,
  createdByOrgMembershipId: memberIdSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
  deletedAt: nullableTimestampSchema,
  pluginCount: z.number().int().nonnegative().optional(),
}).meta({ ref: "PluginArchMarketplace" })

export const connectorAccountSchema = z.object({
  id: connectorAccountIdSchema,
  organizationId: denTypeIdSchema("organization"),
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255),
  externalAccountRef: z.string().trim().min(1).max(255).nullable(),
  displayName: z.string().trim().min(1).max(255),
  status: connectorAccountStatusSchema,
  createdByName: z.string().trim().min(1).max(255).nullable().optional(),
  createdByOrgMembershipId: memberIdSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
  metadata: jsonObjectSchema.optional(),
}).meta({ ref: "PluginArchConnectorAccount" })

export const connectorInstanceSchema = z.object({
  id: connectorInstanceIdSchema,
  organizationId: denTypeIdSchema("organization"),
  connectorAccountId: connectorAccountIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable(),
  name: z.string().trim().min(1).max(255),
  status: connectorInstanceStatusSchema,
  instanceConfigJson: jsonObjectSchema.nullable(),
  lastSyncedAt: nullableTimestampSchema,
  lastSyncStatus: connectorSyncStatusSchema.nullable(),
  lastSyncCursor: z.string().trim().min(1).max(255).nullable(),
  createdByOrgMembershipId: memberIdSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
}).meta({ ref: "PluginArchConnectorInstance" })

export const connectorTargetSchema = z.object({
  id: connectorTargetIdSchema,
  connectorInstanceId: connectorInstanceIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255),
  targetKind: connectorTargetKindSchema,
  externalTargetRef: z.string().trim().min(1).max(255).nullable(),
  targetConfigJson: jsonObjectSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
}).meta({ ref: "PluginArchConnectorTarget" })

export const connectorMappingSchema = z.object({
  id: connectorMappingIdSchema,
  connectorInstanceId: connectorInstanceIdSchema,
  connectorTargetId: connectorTargetIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable(),
  mappingKind: connectorMappingKindSchema,
  selector: z.string().trim().min(1).max(255),
  objectType: configObjectTypeSchema,
  pluginId: pluginIdSchema.nullable(),
  autoAddToPlugin: z.boolean(),
  mappingConfigJson: jsonObjectSchema.nullable(),
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
}).meta({ ref: "PluginArchConnectorMapping" })

// Per-sync counters; passthrough so connectors may attach extra detail keys.
export const connectorSyncSummarySchema = z.object({
  createdCount: z.number().int().nonnegative().optional(),
  updatedCount: z.number().int().nonnegative().optional(),
  deletedCount: z.number().int().nonnegative().optional(),
  skippedCount: z.number().int().nonnegative().optional(),
  failedCount: z.number().int().nonnegative().optional(),
  failures: z.array(jsonObjectSchema).optional(),
}).passthrough().meta({ ref: "PluginArchConnectorSyncSummary" })

export const connectorSyncEventSchema = z.object({
  id: connectorSyncEventIdSchema,
  connectorInstanceId: connectorInstanceIdSchema,
  connectorTargetId: connectorTargetIdSchema.nullable(),
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable(),
  eventType: connectorSyncEventTypeSchema,
  externalEventRef: z.string().trim().min(1).max(255).nullable(),
  sourceRevisionRef: z.string().trim().min(1).max(255).nullable(),
  status: connectorSyncStatusSchema,
  summaryJson: connectorSyncSummarySchema.nullable(),
  startedAt: z.string().datetime({ offset: true }),
  // Null while the sync is still in flight.
  completedAt: nullableTimestampSchema,
}).meta({ ref: "PluginArchConnectorSyncEvent" })

// Binds a config object to its upstream source location within a connector.
export const connectorSourceBindingSchema = z.object({
  id: connectorSourceBindingIdSchema,
  configObjectId: configObjectIdSchema,
  connectorInstanceId: connectorInstanceIdSchema,
  connectorTargetId: connectorTargetIdSchema,
  connectorMappingId: connectorMappingIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable(),
  externalLocator: z.string().trim().min(1).max(255),
  externalStableRef: z.string().trim().min(1).max(255).nullable(),
  lastSeenSourceRevisionRef: z.string().trim().min(1).max(255).nullable(),
  status: configObjectStatusSchema,
  createdAt: z.string().datetime({ offset: true }),
  updatedAt: z.string().datetime({ offset: true }),
  deletedAt: nullableTimestampSchema,
}).meta({ ref: "PluginArchConnectorSourceBinding" })

// Record of a source file/object removed upstream, kept after deletion.
export const connectorSourceTombstoneSchema = z.object({
  id: connectorSourceTombstoneIdSchema,
  connectorInstanceId: connectorInstanceIdSchema,
  connectorTargetId: connectorTargetIdSchema,
  connectorMappingId: connectorMappingIdSchema,
  connectorType: connectorTypeSchema,
  remoteId: z.string().trim().min(1).max(255).nullable(),
  externalLocator: z.string().trim().min(1).max(255),
  formerConfigObjectId: configObjectIdSchema,
  deletedInSyncEventId: connectorSyncEventIdSchema,
  deletedSourceRevisionRef: z.string().trim().min(1).max(255).nullable(),
  createdAt: z.string().datetime({ offset: true }),
}).meta({ ref: "PluginArchConnectorSourceTombstone" })

// --- GitHub webhook wire schemas ---

export const githubWebhookHeadersSchema = z.object({
  xHubSignature256: z.string().trim().min(1),
  xGithubEvent: githubWebhookEventSchema,
  xGithubDelivery: z.string().trim().min(1),
}).meta({ ref: "PluginArchGithubWebhookHeaders" })

// Loose view of the GitHub webhook body: only the fields we read are typed,
// everything else passes through.
export const githubWebhookPayloadSchema = z.object({
  after: z.string().trim().min(1).optional(),
  installation: z.object({
    id: z.number().int().positive(),
  }).passthrough().optional(),
  ref: z.string().trim().min(1).optional(),
  repository: z.object({
    full_name: z.string().trim().min(1),
    id: z.number().int().positive(),
  }).passthrough().optional(),
}).passthrough().meta({ ref: "PluginArchGithubWebhookPayload" })

// Normalized webhook data extracted from headers + payload.
export const githubWebhookEnvelopeSchema = z.object({
  deliveryId: z.string().trim().min(1),
  event: githubWebhookEventSchema,
  installationId: z.number().int().positive().optional(),
  repositoryId: z.number().int().positive().optional(),
  repositoryFullName: z.string().trim().min(1).optional(),
  ref: z.string().trim().min(1).optional(),
  headSha: z.string().trim().min(1).optional(),
  payload: githubWebhookPayloadSchema,
}).meta({ ref: "PluginArchGithubWebhookEnvelope" })

// Job payload queued for an async GitHub sync run.
export const githubConnectorSyncJobSchema = z.object({
  connectorType: z.literal("github"),
  connectorInstanceId: connectorInstanceIdSchema,
  connectorTargetId: connectorTargetIdSchema,
  connectorSyncEventId: connectorSyncEventIdSchema,
  deliveryId: z.string().trim().min(1),
  installationId: z.number().int().positive(),
  repositoryId: z.number().int().positive(),
  repositoryFullName: z.string().trim().min(1),
  ref: z.string().trim().min(1),
  headSha: z.string().trim().min(1),
}).meta({ ref: "PluginArchGithubConnectorSyncJob" })

// Raw request body kept as a string for signature verification.
export const githubWebhookRawBodySchema = z.string().min(1).meta({ ref: "PluginArchGithubWebhookRawBody" })

export const githubWebhookAcceptedResponseSchema = z.object({
  ok: z.literal(true),
  accepted: z.literal(true),
  event: githubWebhookEventSchema,
  deliveryId: z.string().trim().min(1),
  queued: z.boolean(),
}).meta({ ref: "PluginArchGithubWebhookAcceptedResponse" })

export const githubWebhookIgnoredResponseSchema = z.object({
  ok: z.literal(true),
  accepted: z.literal(false),
  reason: z.string().trim().min(1),
}).meta({ ref: "PluginArchGithubWebhookIgnoredResponse" })

export const githubWebhookUnauthorizedResponseSchema =
z.object({ + ok: z.literal(false), + error: z.literal("invalid signature"), +}).meta({ ref: "PluginArchGithubWebhookUnauthorizedResponse" }) + +export function pluginArchListResponseSchema(ref: string, itemSchema: TSchema) { + return z.object({ + items: z.array(itemSchema), + nextCursor: cursorSchema.nullable(), + }).meta({ ref }) +} + +export function pluginArchDetailResponseSchema(ref: string, itemSchema: TSchema) { + return z.object({ + item: itemSchema, + }).meta({ ref }) +} + +export function pluginArchMutationResponseSchema(ref: string, itemSchema: TSchema) { + return z.object({ + ok: z.literal(true), + item: itemSchema, + }).meta({ ref }) +} + +export function pluginArchAsyncResponseSchema(ref: string, jobSchema: TSchema) { + return z.object({ + ok: z.literal(true), + queued: z.literal(true), + job: jobSchema, + }).meta({ ref }) +} + +export const configObjectListResponseSchema = pluginArchListResponseSchema("PluginArchConfigObjectListResponse", configObjectSchema) +export const configObjectDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConfigObjectDetailResponse", configObjectSchema) +export const configObjectMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchConfigObjectMutationResponse", configObjectSchema) +export const configObjectVersionListResponseSchema = pluginArchListResponseSchema("PluginArchConfigObjectVersionListResponse", configObjectVersionSchema) +export const configObjectVersionDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConfigObjectVersionDetailResponse", configObjectVersionSchema) +export const pluginListResponseSchema = pluginArchListResponseSchema("PluginArchPluginListResponse", pluginSchema) +export const pluginDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchPluginDetailResponse", pluginSchema) +export const pluginMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchPluginMutationResponse", pluginSchema) +export const 
pluginMembershipListResponseSchema = pluginArchListResponseSchema("PluginArchPluginMembershipListResponse", pluginMembershipSchema)
export const pluginMembershipDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchPluginMembershipDetailResponse", pluginMembershipSchema)
export const pluginMembershipMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchPluginMembershipMutationResponse", pluginMembershipSchema)
export const marketplaceListResponseSchema = pluginArchListResponseSchema("PluginArchMarketplaceListResponse", marketplaceSchema)
export const marketplaceDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchMarketplaceDetailResponse", marketplaceSchema)
export const marketplaceMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchMarketplaceMutationResponse", marketplaceSchema)

// Marketplace fully resolved to its plugins plus (optionally) its connector source.
export const marketplaceResolvedResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchMarketplaceResolvedResponse",
  z.object({
    marketplace: marketplaceSchema,
    plugins: z.array(pluginSchema.extend({
      componentCounts: z.record(z.string(), z.number().int().nonnegative()).default({}),
    })),
    // Null when the marketplace is not backed by a connector.
    source: z.object({
      connectorAccountId: connectorAccountIdSchema,
      connectorInstanceId: connectorInstanceIdSchema,
      accountLogin: z.string().trim().min(1).nullable(),
      repositoryFullName: z.string().trim().min(1),
      branch: z.string().trim().min(1).nullable(),
    }).nullable(),
  }),
)
export const marketplacePluginListResponseSchema = pluginArchListResponseSchema("PluginArchMarketplacePluginListResponse", marketplacePluginSchema)
export const marketplacePluginMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchMarketplacePluginMutationResponse", marketplacePluginSchema)
export const accessGrantListResponseSchema = pluginArchListResponseSchema("PluginArchAccessGrantListResponse", accessGrantSchema)
export const accessGrantMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchAccessGrantMutationResponse", accessGrantSchema)
export const connectorAccountListResponseSchema = pluginArchListResponseSchema("PluginArchConnectorAccountListResponse", connectorAccountSchema)
export const connectorAccountDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConnectorAccountDetailResponse", connectorAccountSchema)
export const connectorAccountMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchConnectorAccountMutationResponse", connectorAccountSchema)

// Disconnect result: counts of cascaded deletions.
export const connectorAccountDisconnectResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchConnectorAccountDisconnectResponse",
  z.object({
    deletedConfigObjectCount: z.number().int().nonnegative(),
    deletedConnectorInstanceCount: z.number().int().nonnegative(),
    deletedConnectorMappingCount: z.number().int().nonnegative(),
    disconnectedAccountId: connectorAccountIdSchema,
    reason: z.string().trim().nullable(),
  }),
)
export const connectorInstanceConfiguredPluginSchema = pluginSchema.extend({
  componentCounts: z.record(z.string(), z.number().int().nonnegative()).default({}),
  rootPath: z.string().nullable(),
}).meta({ ref: "PluginArchConnectorInstanceConfiguredPlugin" })

export const connectorInstanceConfigurationResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchConnectorInstanceConfigurationResponse",
  z.object({
    autoImportNewPlugins: z.boolean(),
    configuredPlugins: z.array(connectorInstanceConfiguredPluginSchema),
    connectorInstance: connectorInstanceSchema,
    importedConfigObjectCount: z.number().int().nonnegative(),
    mappingCount: z.number().int().nonnegative(),
  }),
)
export const connectorInstanceAutoImportSchema = z.object({
  autoImportNewPlugins: z.boolean(),
})
// Remove result: counts of cascaded deletions.
export const connectorInstanceRemoveResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchConnectorInstanceRemoveResponse",
  z.object({
    deletedConfigObjectCount: z.number().int().nonnegative(),
    deletedConnectorMappingCount: z.number().int().nonnegative(),
    removedConnectorInstanceId: connectorInstanceIdSchema,
  }),
)
export const connectorInstanceListResponseSchema = pluginArchListResponseSchema("PluginArchConnectorInstanceListResponse", connectorInstanceSchema)
export const connectorInstanceDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConnectorInstanceDetailResponse", connectorInstanceSchema)
export const connectorInstanceMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchConnectorInstanceMutationResponse", connectorInstanceSchema)
export const connectorTargetListResponseSchema = pluginArchListResponseSchema("PluginArchConnectorTargetListResponse", connectorTargetSchema)
export const connectorTargetDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConnectorTargetDetailResponse", connectorTargetSchema)
export const connectorTargetMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchConnectorTargetMutationResponse", connectorTargetSchema)
export const connectorMappingListResponseSchema = pluginArchListResponseSchema("PluginArchConnectorMappingListResponse", connectorMappingSchema)
export const connectorMappingMutationResponseSchema = pluginArchMutationResponseSchema("PluginArchConnectorMappingMutationResponse", connectorMappingSchema)
export const connectorSyncEventListResponseSchema = pluginArchListResponseSchema("PluginArchConnectorSyncEventListResponse", connectorSyncEventSchema)
export const connectorSyncEventDetailResponseSchema = pluginArchDetailResponseSchema("PluginArchConnectorSyncEventDetailResponse", connectorSyncEventSchema)
export const connectorSyncAsyncResponseSchema = pluginArchAsyncResponseSchema(
  "PluginArchConnectorSyncAsyncResponse",
  z.object({ id: connectorSyncEventIdSchema }),
)

// --- GitHub repository discovery response schemas ---

export const githubRepositorySchema = z.object({
  id: z.number().int().positive(),
  fullName: z.string().trim().min(1),
  defaultBranch: z.string().trim().min(1).nullable(),
  hasPluginManifest: z.boolean().optional(),
  manifestKind: z.enum(["marketplace", "plugin"]).nullable().optional(),
  marketplacePluginCount: z.number().int().nonnegative().nullable().optional(),
  private: z.boolean(),
}).meta({ ref: "PluginArchGithubRepository" })
export const githubRepositoryListResponseSchema = pluginArchListResponseSchema("PluginArchGithubRepositoryListResponse", githubRepositorySchema)
// Progress entry for one discovery pipeline step.
export const githubDiscoveryStepSchema = z.object({
  id: z.enum(["read_repository_structure", "check_marketplace_manifest", "check_plugin_manifests", "prepare_discovered_plugins"]),
  label: z.string().trim().min(1),
  status: z.enum(["completed", "running", "warning"]),
}).meta({ ref: "PluginArchGithubDiscoveryStep" })
export const githubDiscoveryTreeSummarySchema = z.object({
  scannedEntryCount: z.number().int().nonnegative(),
  strategy: z.enum(["git-tree-recursive"]),
  // True when the git tree listing was cut off before completion.
  truncated: z.boolean(),
}).meta({ ref: "PluginArchGithubDiscoveryTreeSummary" })
// One plugin candidate found in a repository during discovery.
export const githubDiscoveredPluginSchema = z.object({
  key: z.string().trim().min(1),
  sourceKind: z.enum(["marketplace_entry", "plugin_manifest", "standalone_claude", "folder_inference"]),
  rootPath: z.string(),
  displayName: z.string().trim().min(1),
  description: nullableStringSchema,
  selectedByDefault: z.boolean(),
  supported: z.boolean(),
  manifestPath: nullableStringSchema,
  warnings: z.array(z.string().trim().min(1)),
  componentKinds: z.array(z.enum(["skill", "command", "agent", "hook", "mcp_server", "lsp_server", "monitor", "settings"])),
  // Repository paths grouped by component kind.
  componentPaths: z.object({
    agents: z.array(z.string().trim().min(1)),
    commands: z.array(z.string().trim().min(1)),
    hooks: z.array(z.string().trim().min(1)),
    lspServers: z.array(z.string().trim().min(1)),
    mcpServers: z.array(z.string().trim().min(1)),
    monitors: z.array(z.string().trim().min(1)),
    settings: z.array(z.string().trim().min(1)),
    skills: z.array(z.string().trim().min(1)),
  }),
  metadata: jsonObjectSchema,
}).meta({ ref: "PluginArchGithubDiscoveredPlugin" })
export const githubConnectorDiscoveryResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchGithubConnectorDiscoveryResponse",
  z.object({
    autoImportNewPlugins: z.boolean(),
    classification: z.enum(["claude_marketplace_repo", "claude_multi_plugin_repo", "claude_single_plugin_repo", "folder_inferred_repo", "unsupported"]),
    connectorInstance: connectorInstanceSchema,
    connectorTarget: connectorTargetSchema,
    discoveredPlugins: z.array(githubDiscoveredPluginSchema),
    repositoryFullName: z.string().trim().min(1),
    sourceRevisionRef: z.string().trim().min(1),
    steps: z.array(githubDiscoveryStepSchema),
    treeSummary: githubDiscoveryTreeSummarySchema,
    warnings: z.array(z.string().trim().min(1)),
  }),
)
export const githubDiscoveryTreeEntrySchema = z.object({
  id: z.string().trim().min(1),
  kind: z.enum(["blob", "tree"]),
  path: z.string().trim().min(1),
  sha: nullableStringSchema,
  size: z.number().int().nonnegative().nullable(),
}).meta({ ref: "PluginArchGithubDiscoveryTreeEntry" })
export const githubDiscoveryTreeResponseSchema = pluginArchListResponseSchema("PluginArchGithubDiscoveryTreeResponse", githubDiscoveryTreeEntrySchema)
export const githubDiscoveryApplyResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchGithubDiscoveryApplyResponse",
  z.object({
    autoImportNewPlugins: z.boolean(),
    createdMarketplace: marketplaceSchema.nullable().optional(),
    connectorInstance: connectorInstanceSchema,
    connectorTarget: connectorTargetSchema,
    createdPlugins: z.array(pluginSchema),
    createdMappings: z.array(connectorMappingSchema),
    materializedConfigObjects: z.array(configObjectSchema),
    sourceRevisionRef: z.string().trim().min(1),
  }),
)
export const githubInstallStartResponseSchema = pluginArchMutationResponseSchema(
  "PluginArchGithubInstallStartResponse",
  z.object({
    redirectUrl:
z.string().url(), + state: z.string().trim().min(1), + }), +) +export const githubInstallCompleteResponseSchema = pluginArchMutationResponseSchema( + "PluginArchGithubInstallCompleteResponse", + z.object({ + connectorAccount: connectorAccountSchema, + repositories: z.array(githubRepositorySchema), + }), +) +export const githubSetupResponseSchema = pluginArchMutationResponseSchema( + "PluginArchGithubSetupResponse", + z.object({ + connectorAccount: connectorAccountSchema, + connectorInstance: connectorInstanceSchema, + connectorTarget: connectorTargetSchema, + }), +) +export const githubValidateTargetResponseSchema = pluginArchMutationResponseSchema( + "PluginArchGithubValidateTargetResponse", + z.object({ + branchExists: z.boolean(), + defaultBranch: z.string().trim().min(1).nullable(), + repositoryAccessible: z.boolean(), + }), +) diff --git a/ee/apps/den-api/src/routes/org/plugin-system/store.ts b/ee/apps/den-api/src/routes/org/plugin-system/store.ts new file mode 100644 index 0000000000..bd9f1ec841 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/plugin-system/store.ts @@ -0,0 +1,3780 @@ +import { and, asc, desc, eq, inArray, isNull } from "@openwork-ee/den-db/drizzle" +import { + ConfigObjectAccessGrantTable, + ConfigObjectTable, + ConfigObjectVersionTable, + ConnectorAccountTable, + ConnectorInstanceAccessGrantTable, + ConnectorInstanceTable, + ConnectorMappingTable, + ConnectorSourceBindingTable, + ConnectorSourceTombstoneTable, + ConnectorSyncEventTable, + ConnectorTargetTable, + MarketplaceAccessGrantTable, + MarketplacePluginTable, + MarketplaceTable, + MemberTable, + OrganizationTable, + PluginAccessGrantTable, + PluginConfigObjectTable, + PluginTable, +} from "@openwork-ee/den-db/schema" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import type { PluginArchActorContext, PluginArchResourceKind, PluginArchRole } from "./access.js" +import { requirePluginArchResourceRole, resolvePluginArchResourceRole } from "./access.js" +import { + 
buildGithubAppInstallUrl, + createGithubInstallStateToken, + GithubConnectorConfigError, + GithubConnectorRequestError, + getGithubAppSummary, + getGithubConnectorAppConfig, + getGithubInstallationAccessToken, + getGithubRepositoryTextFile, + getGithubRepositoryTree, + getGithubInstallationSummary, + listGithubInstallationRepositories, + validateGithubInstallationTarget, + verifyGithubInstallStateToken, +} from "./github-app.js" +import { + buildGithubRepoDiscovery, + type GithubDiscoveredPlugin, + type GithubDiscoveryClassification, + type GithubMarketplaceInfo, + type GithubDiscoveryTreeEntry, +} from "./github-discovery.js" +import { planConnectorImportedResourceCleanup, uniqueIds } from "./connector-cleanup.js" +import { db } from "../../../db.js" +import { env } from "../../../env.js" +import { roleIncludesOwner } from "../../../orgs.js" + +type OrganizationId = PluginArchActorContext["organizationContext"]["organization"]["id"] +type MemberId = PluginArchActorContext["organizationContext"]["currentMember"]["id"] +type TeamId = PluginArchActorContext["memberTeams"][number]["id"] +type ConfigObjectRow = typeof ConfigObjectTable.$inferSelect +type ConfigObjectVersionRow = typeof ConfigObjectVersionTable.$inferSelect +type MarketplaceRow = typeof MarketplaceTable.$inferSelect +type MarketplaceMembershipRow = typeof MarketplacePluginTable.$inferSelect +type PluginRow = typeof PluginTable.$inferSelect +type PluginMembershipRow = typeof PluginConfigObjectTable.$inferSelect +type ConfigObjectId = ConfigObjectRow["id"] +type ConfigObjectVersionId = ConfigObjectVersionRow["id"] +type MarketplaceId = MarketplaceRow["id"] +type MarketplaceMembershipId = MarketplaceMembershipRow["id"] +type PluginId = PluginRow["id"] +type PluginMembershipId = PluginMembershipRow["id"] +type AccessGrantRow = + | typeof ConfigObjectAccessGrantTable.$inferSelect + | typeof MarketplaceAccessGrantTable.$inferSelect + | typeof PluginAccessGrantTable.$inferSelect + | typeof 
ConnectorInstanceAccessGrantTable.$inferSelect +type ConfigObjectAccessGrantId = typeof ConfigObjectAccessGrantTable.$inferSelect.id +type MarketplaceAccessGrantId = typeof MarketplaceAccessGrantTable.$inferSelect.id +type PluginAccessGrantId = typeof PluginAccessGrantTable.$inferSelect.id +type ConnectorInstanceAccessGrantId = typeof ConnectorInstanceAccessGrantTable.$inferSelect.id +type ConnectorAccountRow = typeof ConnectorAccountTable.$inferSelect +type ConnectorInstanceRow = typeof ConnectorInstanceTable.$inferSelect +type ConnectorTargetRow = typeof ConnectorTargetTable.$inferSelect +type ConnectorMappingRow = typeof ConnectorMappingTable.$inferSelect +type ConnectorSyncEventRow = typeof ConnectorSyncEventTable.$inferSelect +type ConnectorAccountId = ConnectorAccountRow["id"] +type ConnectorInstanceId = ConnectorInstanceRow["id"] +type ConnectorTargetId = ConnectorTargetRow["id"] +type ConnectorMappingId = ConnectorMappingRow["id"] +type ConnectorSyncEventId = ConnectorSyncEventRow["id"] +type MemberRow = typeof MemberTable.$inferSelect +type OrganizationRow = typeof OrganizationTable.$inferSelect +type DbTransaction = Parameters[0]>[0] + +type CursorPage = { + items: TItem[] + nextCursor: string | null +} + +type GithubConnectorDiscoveryStep = { + id: "read_repository_structure" | "check_marketplace_manifest" | "check_plugin_manifests" | "prepare_discovered_plugins" + label: string + status: "completed" | "running" | "warning" +} + +type GithubConnectorDiscoveryTreeSummary = { + scannedEntryCount: number + strategy: "git-tree-recursive" + truncated: boolean +} + +type GithubDiscoveryImportPlan = { + objectType: ConnectorMappingRow["objectType"] + paths: string[] + selector: string +} + +type GithubDiscoveryCacheEntry = { + branch: string + classification: GithubDiscoveryClassification + discoveredPlugins: GithubDiscoveredPlugin[] + importPlansByPluginKey: Record + marketplace: GithubMarketplaceInfo | null + ref: string + repositoryFullName: string + 
sourceRevisionRef: string + treeSummary: GithubConnectorDiscoveryTreeSummary + warnings: string[] +} + +type GithubConnectorDiscoveryComputation = GithubDiscoveryCacheEntry & { + connectorInstance: ReturnType + connectorTarget: ReturnType + treeEntries: GithubDiscoveryTreeEntry[] +} + +type GithubDiscoverySnapshot = GithubDiscoveryCacheEntry & { + treeEntries: GithubDiscoveryTreeEntry[] +} + +type ConfigObjectInput = { + metadata?: Record + normalizedPayloadJson?: Record + parserMode?: string + rawSourceText?: string + schemaVersion?: string +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +type AccessGrantWrite = { + orgMembershipId?: MemberId + orgWide?: boolean + role: PluginArchRole + teamId?: TeamId +} + +type RepositorySummary = { + defaultBranch: string | null + fullName: string + hasPluginManifest?: boolean + id: number + manifestKind?: "marketplace" | "plugin" | null + marketplacePluginCount?: number | null + private: boolean +} + +type ConfigObjectResourceTarget = { + resourceId: ConfigObjectId + resourceKind: "config_object" +} + +type PluginResourceTarget = { + resourceId: PluginId + resourceKind: "plugin" +} + +type MarketplaceResourceTarget = { + resourceId: MarketplaceId + resourceKind: "marketplace" +} + +type ConnectorInstanceResourceTarget = { + resourceId: ConnectorInstanceId + resourceKind: "connector_instance" +} + +type ResourceTarget = + | ConfigObjectResourceTarget + | MarketplaceResourceTarget + | PluginResourceTarget + | ConnectorInstanceResourceTarget + +type ConfigObjectGrantTarget = ConfigObjectResourceTarget & { grantId: ConfigObjectAccessGrantId } +type MarketplaceGrantTarget = MarketplaceResourceTarget & { grantId: MarketplaceAccessGrantId } +type PluginGrantTarget = PluginResourceTarget & { grantId: PluginAccessGrantId } +type ConnectorInstanceGrantTarget = ConnectorInstanceResourceTarget & { grantId: ConnectorInstanceAccessGrantId } +type GrantTarget = 
ConfigObjectGrantTarget | MarketplaceGrantTarget | PluginGrantTarget | ConnectorInstanceGrantTarget + +export class PluginArchRouteFailure extends Error { + constructor( + readonly status: 400 | 404 | 409, + readonly error: string, + message: string, + ) { + super(message) + this.name = "PluginArchRouteFailure" + } +} + +function normalizeOptionalString(value: string | null | undefined) { + const trimmed = value?.trim() + return trimmed ? trimmed : null +} + +function firstTextLine(value: string) { + return value + .split(/\r?\n/g) + .map((line) => line.trim()) + .filter(Boolean)[0] ?? "" +} + +function stripLineDecorators(value: string) { + return value + .replace(/^#{1,6}\s+/, "") + .replace(/^[-*+]\s+/, "") + .replace(/^title\s*:\s*/i, "") + .replace(/^description\s*:\s*/i, "") + .trim() +} + +function deriveProjection(input: { objectType: ConfigObjectRow["objectType"]; value: ConfigObjectInput }) { + const metadata = input.value.metadata ?? {} + const payload = input.value.normalizedPayloadJson ?? {} + const rawSourceText = normalizeOptionalString(input.value.rawSourceText) + const titleCandidate = [ + typeof metadata.title === "string" ? metadata.title : null, + typeof metadata.name === "string" ? metadata.name : null, + typeof payload.title === "string" ? payload.title : null, + typeof payload.name === "string" ? payload.name : null, + rawSourceText ? stripLineDecorators(firstTextLine(rawSourceText)) : null, + ].find((value) => Boolean(normalizeOptionalString(value ?? undefined))) + + const descriptionCandidate = [ + typeof metadata.description === "string" ? metadata.description : null, + typeof payload.description === "string" ? payload.description : null, + rawSourceText + ? rawSourceText + .split(/\r?\n/g) + .map((line) => stripLineDecorators(line.trim())) + .filter(Boolean) + .slice(1) + .find(Boolean) ?? null + : null, + ].find((value) => Boolean(normalizeOptionalString(value ?? undefined))) + + const title = normalizeOptionalString(titleCandidate ?? 
undefined) + ?? `${input.objectType.charAt(0).toUpperCase()}${input.objectType.slice(1)} ${new Date().toISOString()}` + + const description = normalizeOptionalString(descriptionCandidate ?? undefined) + const searchText = [title, description, rawSourceText].filter(Boolean).join("\n") || null + + return { + description, + searchText, + title, + } +} + +function pageItems(items: TItem[], cursor: string | undefined, limit: number | undefined): CursorPage { + const ordered = [...items] + const pageSize = limit ?? 50 + const startIndex = cursor ? Math.max(ordered.findIndex((item) => item.id === cursor) + 1, 0) : 0 + const sliced = ordered.slice(startIndex, startIndex + pageSize) + const nextCursor = ordered.length > startIndex + pageSize ? sliced[sliced.length - 1]?.id ?? null : null + return { items: sliced, nextCursor } +} + +async function getLatestVersions(configObjectIds: ConfigObjectId[]) { + if (configObjectIds.length === 0) { + return new Map() + } + + const rows = await db + .select() + .from(ConfigObjectVersionTable) + .where(inArray(ConfigObjectVersionTable.configObjectId, configObjectIds)) + .orderBy(desc(ConfigObjectVersionTable.createdAt), desc(ConfigObjectVersionTable.id)) + + const latestByObjectId = new Map() + for (const row of rows) { + if (!latestByObjectId.has(row.configObjectId)) { + latestByObjectId.set(row.configObjectId, row) + } + } + + return latestByObjectId +} + +function serializeVersion(row: ConfigObjectVersionRow) { + return { + configObjectId: row.configObjectId, + connectorSyncEventId: row.connectorSyncEventId, + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + createdVia: row.createdVia, + id: row.id, + isDeletedVersion: row.isDeletedVersion, + normalizedPayloadJson: row.normalizedPayloadJson, + rawSourceText: row.rawSourceText, + schemaVersion: row.schemaVersion, + sourceRevisionRef: row.sourceRevisionRef, + } +} + +function serializeConfigObject(row: ConfigObjectRow, latestVersion: 
ConfigObjectVersionRow | null) { + return { + connectorInstanceId: row.connectorInstanceId, + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + currentFileExtension: row.currentFileExtension, + currentFileName: row.currentFileName, + currentRelativePath: row.currentRelativePath, + deletedAt: row.deletedAt ? row.deletedAt.toISOString() : null, + description: row.description, + id: row.id, + latestVersion: latestVersion ? serializeVersion(latestVersion) : null, + objectType: row.objectType, + organizationId: row.organizationId, + searchText: row.searchText, + sourceMode: row.sourceMode, + status: row.status, + title: row.title, + updatedAt: row.updatedAt.toISOString(), + } +} + +type PluginMarketplaceSummary = { + id: string + name: string +} + +function serializePlugin(row: PluginRow, memberCount?: number, marketplaces: PluginMarketplaceSummary[] = []) { + return { + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + deletedAt: row.deletedAt ? row.deletedAt.toISOString() : null, + description: row.description, + id: row.id, + marketplaces, + memberCount, + name: row.name, + organizationId: row.organizationId, + status: row.status, + updatedAt: row.updatedAt.toISOString(), + } +} + +function serializeMarketplace(row: MarketplaceRow, pluginCount?: number) { + return { + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + deletedAt: row.deletedAt ? 
row.deletedAt.toISOString() : null, + description: row.description, + id: row.id, + name: row.name, + organizationId: row.organizationId, + pluginCount, + status: row.status, + updatedAt: row.updatedAt.toISOString(), + } +} + +function serializeMembership(row: PluginMembershipRow, configObject?: ReturnType) { + return { + configObject, + configObjectId: row.configObjectId, + connectorMappingId: row.connectorMappingId, + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + id: row.id, + membershipSource: row.membershipSource, + pluginId: row.pluginId, + removedAt: row.removedAt ? row.removedAt.toISOString() : null, + } +} + +function serializeMarketplaceMembership(row: MarketplaceMembershipRow, plugin?: ReturnType) { + return { + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + id: row.id, + marketplaceId: row.marketplaceId, + membershipSource: row.membershipSource, + plugin, + pluginId: row.pluginId, + removedAt: row.removedAt ? row.removedAt.toISOString() : null, + } +} + +function serializeAccessGrant(row: AccessGrantRow) { + return { + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + id: row.id, + orgMembershipId: row.orgMembershipId, + orgWide: row.orgWide, + removedAt: row.removedAt ? row.removedAt.toISOString() : null, + role: row.role, + teamId: row.teamId, + } +} + +function serializeConnectorAccount(row: ConnectorAccountRow, creatorName: string | null = null) { + return { + connectorType: row.connectorType, + createdAt: row.createdAt.toISOString(), + createdByName: creatorName, + createdByOrgMembershipId: row.createdByOrgMembershipId, + displayName: row.displayName, + externalAccountRef: row.externalAccountRef, + id: row.id, + metadata: row.metadataJson ?? 
undefined, + organizationId: row.organizationId, + remoteId: row.remoteId, + status: row.status, + updatedAt: row.updatedAt.toISOString(), + } +} + +function resolveCreatorName(context: PluginArchActorContext, memberId: string) { + const member = context.organizationContext.members.find((entry) => entry.id === memberId) + if (!member) return null + return member.user.name?.trim() || member.user.email || null +} + +function serializeConnectorInstance(row: ConnectorInstanceRow) { + return { + connectorAccountId: row.connectorAccountId, + connectorType: row.connectorType, + createdAt: row.createdAt.toISOString(), + createdByOrgMembershipId: row.createdByOrgMembershipId, + id: row.id, + instanceConfigJson: row.instanceConfigJson, + lastSyncCursor: row.lastSyncCursor, + lastSyncStatus: row.lastSyncStatus, + lastSyncedAt: row.lastSyncedAt ? row.lastSyncedAt.toISOString() : null, + name: row.name, + organizationId: row.organizationId, + remoteId: row.remoteId, + status: row.status, + updatedAt: row.updatedAt.toISOString(), + } +} + +function serializeConnectorTarget(row: ConnectorTargetRow) { + return { + connectorInstanceId: row.connectorInstanceId, + connectorType: row.connectorType, + createdAt: row.createdAt.toISOString(), + externalTargetRef: row.externalTargetRef, + id: row.id, + remoteId: row.remoteId, + targetConfigJson: row.targetConfigJson, + targetKind: row.targetKind, + updatedAt: row.updatedAt.toISOString(), + } +} + +function serializeConnectorMapping(row: ConnectorMappingRow) { + return { + autoAddToPlugin: row.autoAddToPlugin, + connectorInstanceId: row.connectorInstanceId, + connectorTargetId: row.connectorTargetId, + connectorType: row.connectorType, + createdAt: row.createdAt.toISOString(), + id: row.id, + mappingConfigJson: row.mappingConfigJson, + mappingKind: row.mappingKind, + objectType: row.objectType, + pluginId: row.pluginId, + remoteId: row.remoteId, + selector: row.selector, + updatedAt: row.updatedAt.toISOString(), + } +} + +function 
serializeConnectorSyncEvent(row: ConnectorSyncEventRow) { + return { + completedAt: row.completedAt ? row.completedAt.toISOString() : null, + connectorInstanceId: row.connectorInstanceId, + connectorTargetId: row.connectorTargetId, + connectorType: row.connectorType, + eventType: row.eventType, + externalEventRef: row.externalEventRef, + id: row.id, + remoteId: row.remoteId, + sourceRevisionRef: row.sourceRevisionRef, + startedAt: row.startedAt.toISOString(), + status: row.status, + summaryJson: row.summaryJson, + } +} + +async function getConfigObjectRow(organizationId: OrganizationId, configObjectId: ConfigObjectId) { + const rows = await db + .select() + .from(ConfigObjectTable) + .where(and(eq(ConfigObjectTable.organizationId, organizationId), eq(ConfigObjectTable.id, configObjectId))) + .limit(1) + + return rows[0] ?? null +} + +async function getPluginRow(organizationId: OrganizationId, pluginId: PluginId) { + const rows = await db + .select() + .from(PluginTable) + .where(and(eq(PluginTable.organizationId, organizationId), eq(PluginTable.id, pluginId))) + .limit(1) + + return rows[0] ?? null +} + +async function getMarketplaceRow(organizationId: OrganizationId, marketplaceId: MarketplaceId) { + const rows = await db + .select() + .from(MarketplaceTable) + .where(and(eq(MarketplaceTable.organizationId, organizationId), eq(MarketplaceTable.id, marketplaceId))) + .limit(1) + + return rows[0] ?? null +} + +async function getConnectorAccountRow(organizationId: OrganizationId, connectorAccountId: ConnectorAccountId) { + const rows = await db + .select() + .from(ConnectorAccountTable) + .where(and(eq(ConnectorAccountTable.organizationId, organizationId), eq(ConnectorAccountTable.id, connectorAccountId))) + .limit(1) + + return rows[0] ?? 
null +} + +async function getConnectorInstanceRow(organizationId: OrganizationId, connectorInstanceId: ConnectorInstanceId) { + const rows = await db + .select() + .from(ConnectorInstanceTable) + .where(and(eq(ConnectorInstanceTable.organizationId, organizationId), eq(ConnectorInstanceTable.id, connectorInstanceId))) + .limit(1) + + return rows[0] ?? null +} + +async function getConnectorTargetRow(organizationId: OrganizationId, connectorTargetId: ConnectorTargetId) { + const rows = await db + .select({ target: ConnectorTargetTable, instance: ConnectorInstanceTable }) + .from(ConnectorTargetTable) + .innerJoin(ConnectorInstanceTable, eq(ConnectorTargetTable.connectorInstanceId, ConnectorInstanceTable.id)) + .where(and(eq(ConnectorTargetTable.id, connectorTargetId), eq(ConnectorInstanceTable.organizationId, organizationId))) + .limit(1) + + return rows[0]?.target ?? null +} + +async function getConnectorMappingRow(organizationId: OrganizationId, connectorMappingId: ConnectorMappingId) { + const rows = await db + .select({ mapping: ConnectorMappingTable, instance: ConnectorInstanceTable }) + .from(ConnectorMappingTable) + .innerJoin(ConnectorInstanceTable, eq(ConnectorMappingTable.connectorInstanceId, ConnectorInstanceTable.id)) + .where(and(eq(ConnectorMappingTable.id, connectorMappingId), eq(ConnectorInstanceTable.organizationId, organizationId))) + .limit(1) + + return rows[0]?.mapping ?? null +} + +async function getConnectorSyncEventRow(organizationId: OrganizationId, connectorSyncEventId: ConnectorSyncEventId) { + const rows = await db + .select({ event: ConnectorSyncEventTable, instance: ConnectorInstanceTable }) + .from(ConnectorSyncEventTable) + .innerJoin(ConnectorInstanceTable, eq(ConnectorSyncEventTable.connectorInstanceId, ConnectorInstanceTable.id)) + .where(and(eq(ConnectorSyncEventTable.id, connectorSyncEventId), eq(ConnectorInstanceTable.organizationId, organizationId))) + .limit(1) + + return rows[0]?.event ?? 
null +} + +async function ensureVisibleConfigObject(context: PluginArchActorContext, configObjectId: ConfigObjectId) { + const row = await getConfigObjectRow(context.organizationContext.organization.id, configObjectId) + if (!row) { + throw new PluginArchRouteFailure(404, "config_object_not_found", "Config object not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "config_object", role: "viewer" }) + return row +} + +async function ensureEditablePlugin(context: PluginArchActorContext, pluginId: PluginId) { + const row = await getPluginRow(context.organizationContext.organization.id, pluginId) + if (!row) { + throw new PluginArchRouteFailure(404, "plugin_not_found", "Plugin not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "plugin", role: "editor" }) + return row +} + +async function ensureEditableMarketplace(context: PluginArchActorContext, marketplaceId: MarketplaceId) { + const row = await getMarketplaceRow(context.organizationContext.organization.id, marketplaceId) + if (!row) { + throw new PluginArchRouteFailure(404, "marketplace_not_found", "Marketplace not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "marketplace", role: "editor" }) + return row +} + +async function ensureVisibleMarketplace(context: PluginArchActorContext, marketplaceId: MarketplaceId) { + const row = await getMarketplaceRow(context.organizationContext.organization.id, marketplaceId) + if (!row) { + throw new PluginArchRouteFailure(404, "marketplace_not_found", "Marketplace not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "marketplace", role: "viewer" }) + return row +} + +async function ensureVisiblePlugin(context: PluginArchActorContext, pluginId: PluginId) { + const row = await getPluginRow(context.organizationContext.organization.id, pluginId) + if (!row) { + throw new 
PluginArchRouteFailure(404, "plugin_not_found", "Plugin not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "plugin", role: "viewer" }) + return row +} + +async function ensureVisibleConnectorInstance(context: PluginArchActorContext, connectorInstanceId: ConnectorInstanceId) { + const row = await getConnectorInstanceRow(context.organizationContext.organization.id, connectorInstanceId) + if (!row) { + throw new PluginArchRouteFailure(404, "connector_instance_not_found", "Connector instance not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "connector_instance", role: "viewer" }) + return row +} + +async function ensureEditableConnectorInstance(context: PluginArchActorContext, connectorInstanceId: ConnectorInstanceId) { + const row = await getConnectorInstanceRow(context.organizationContext.organization.id, connectorInstanceId) + if (!row) { + throw new PluginArchRouteFailure(404, "connector_instance_not_found", "Connector instance not found.") + } + await requirePluginArchResourceRole({ context, resourceId: row.id, resourceKind: "connector_instance", role: "editor" }) + return row +} + +async function upsertGrant(input: ResourceTarget & { + context: PluginArchActorContext + value: AccessGrantWrite +}) { + const createdAt = new Date() + const createdByOrgMembershipId = input.context.organizationContext.currentMember.id + const organizationId = input.context.organizationContext.organization.id + + if (input.resourceKind === "config_object") { + const existing = await db + .select() + .from(ConfigObjectAccessGrantTable) + .where(and( + eq(ConfigObjectAccessGrantTable.configObjectId, input.resourceId), + input.value.orgMembershipId + ? eq(ConfigObjectAccessGrantTable.orgMembershipId, input.value.orgMembershipId) + : input.value.teamId + ? 
eq(ConfigObjectAccessGrantTable.teamId, input.value.teamId) + : eq(ConfigObjectAccessGrantTable.orgWide, true), + )) + .limit(1) + + if (existing[0]) { + await db + .update(ConfigObjectAccessGrantTable) + .set({ + createdByOrgMembershipId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + removedAt: null, + role: input.value.role, + teamId: input.value.teamId ?? null, + }) + .where(eq(ConfigObjectAccessGrantTable.id, existing[0].id)) + return serializeAccessGrant({ ...existing[0], createdByOrgMembershipId, orgMembershipId: input.value.orgMembershipId ?? null, orgWide: input.value.orgWide ?? false, removedAt: null, role: input.value.role, teamId: input.value.teamId ?? null }) + } + + const row = { + configObjectId: input.resourceId, + createdAt, + createdByOrgMembershipId, + id: createDenTypeId("configObjectAccessGrant"), + organizationId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + role: input.value.role, + teamId: input.value.teamId ?? null, + } + await db.insert(ConfigObjectAccessGrantTable).values(row) + return serializeAccessGrant({ ...row, removedAt: null }) + } + + if (input.resourceKind === "marketplace") { + const existing = await db + .select() + .from(MarketplaceAccessGrantTable) + .where(and( + eq(MarketplaceAccessGrantTable.marketplaceId, input.resourceId), + input.value.orgMembershipId + ? eq(MarketplaceAccessGrantTable.orgMembershipId, input.value.orgMembershipId) + : input.value.teamId + ? eq(MarketplaceAccessGrantTable.teamId, input.value.teamId) + : eq(MarketplaceAccessGrantTable.orgWide, true), + )) + .limit(1) + + if (existing[0]) { + await db + .update(MarketplaceAccessGrantTable) + .set({ + createdByOrgMembershipId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + removedAt: null, + role: input.value.role, + teamId: input.value.teamId ?? 
null, + }) + .where(eq(MarketplaceAccessGrantTable.id, existing[0].id)) + return serializeAccessGrant({ ...existing[0], createdByOrgMembershipId, orgMembershipId: input.value.orgMembershipId ?? null, orgWide: input.value.orgWide ?? false, removedAt: null, role: input.value.role, teamId: input.value.teamId ?? null }) + } + + const row = { + createdAt, + createdByOrgMembershipId, + id: createDenTypeId("marketplaceAccessGrant"), + marketplaceId: input.resourceId, + organizationId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + role: input.value.role, + teamId: input.value.teamId ?? null, + } + await db.insert(MarketplaceAccessGrantTable).values(row) + return serializeAccessGrant({ ...row, removedAt: null }) + } + + if (input.resourceKind === "plugin") { + const existing = await db + .select() + .from(PluginAccessGrantTable) + .where(and( + eq(PluginAccessGrantTable.pluginId, input.resourceId), + input.value.orgMembershipId + ? eq(PluginAccessGrantTable.orgMembershipId, input.value.orgMembershipId) + : input.value.teamId + ? eq(PluginAccessGrantTable.teamId, input.value.teamId) + : eq(PluginAccessGrantTable.orgWide, true), + )) + .limit(1) + + if (existing[0]) { + await db + .update(PluginAccessGrantTable) + .set({ + createdByOrgMembershipId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + removedAt: null, + role: input.value.role, + teamId: input.value.teamId ?? null, + }) + .where(eq(PluginAccessGrantTable.id, existing[0].id)) + return serializeAccessGrant({ ...existing[0], createdByOrgMembershipId, orgMembershipId: input.value.orgMembershipId ?? null, orgWide: input.value.orgWide ?? false, removedAt: null, role: input.value.role, teamId: input.value.teamId ?? null }) + } + + const row = { + createdAt, + createdByOrgMembershipId, + id: createDenTypeId("pluginAccessGrant"), + organizationId, + orgMembershipId: input.value.orgMembershipId ?? 
null, + orgWide: input.value.orgWide ?? false, + pluginId: input.resourceId, + role: input.value.role, + teamId: input.value.teamId ?? null, + } + await db.insert(PluginAccessGrantTable).values(row) + return serializeAccessGrant({ ...row, removedAt: null }) + } + + const existing = await db + .select() + .from(ConnectorInstanceAccessGrantTable) + .where(and( + eq(ConnectorInstanceAccessGrantTable.connectorInstanceId, input.resourceId), + input.value.orgMembershipId + ? eq(ConnectorInstanceAccessGrantTable.orgMembershipId, input.value.orgMembershipId) + : input.value.teamId + ? eq(ConnectorInstanceAccessGrantTable.teamId, input.value.teamId) + : eq(ConnectorInstanceAccessGrantTable.orgWide, true), + )) + .limit(1) + + if (existing[0]) { + await db + .update(ConnectorInstanceAccessGrantTable) + .set({ + createdByOrgMembershipId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + removedAt: null, + role: input.value.role, + teamId: input.value.teamId ?? null, + }) + .where(eq(ConnectorInstanceAccessGrantTable.id, existing[0].id)) + return serializeAccessGrant({ ...existing[0], createdByOrgMembershipId, orgMembershipId: input.value.orgMembershipId ?? null, orgWide: input.value.orgWide ?? false, removedAt: null, role: input.value.role, teamId: input.value.teamId ?? null }) + } + + const row = { + connectorInstanceId: input.resourceId, + createdAt, + createdByOrgMembershipId, + id: createDenTypeId("connectorInstanceAccessGrant"), + organizationId, + orgMembershipId: input.value.orgMembershipId ?? null, + orgWide: input.value.orgWide ?? false, + role: input.value.role, + teamId: input.value.teamId ?? 
null, + } + await db.insert(ConnectorInstanceAccessGrantTable).values(row) + return serializeAccessGrant({ ...row, removedAt: null }) +} + +async function removeGrant(input: GrantTarget & { context: PluginArchActorContext }) { + const removedAt = new Date() + if (input.resourceKind === "config_object") { + const rows = await db + .select() + .from(ConfigObjectAccessGrantTable) + .where(and(eq(ConfigObjectAccessGrantTable.id, input.grantId), eq(ConfigObjectAccessGrantTable.configObjectId, input.resourceId))) + .limit(1) + if (!rows[0]) throw new PluginArchRouteFailure(404, "access_grant_not_found", "Access grant not found.") + await db.update(ConfigObjectAccessGrantTable).set({ removedAt }).where(eq(ConfigObjectAccessGrantTable.id, input.grantId)) + return + } + if (input.resourceKind === "marketplace") { + const rows = await db + .select() + .from(MarketplaceAccessGrantTable) + .where(and(eq(MarketplaceAccessGrantTable.id, input.grantId), eq(MarketplaceAccessGrantTable.marketplaceId, input.resourceId))) + .limit(1) + if (!rows[0]) throw new PluginArchRouteFailure(404, "access_grant_not_found", "Access grant not found.") + await db.update(MarketplaceAccessGrantTable).set({ removedAt }).where(eq(MarketplaceAccessGrantTable.id, input.grantId)) + return + } + if (input.resourceKind === "plugin") { + const rows = await db + .select() + .from(PluginAccessGrantTable) + .where(and(eq(PluginAccessGrantTable.id, input.grantId), eq(PluginAccessGrantTable.pluginId, input.resourceId))) + .limit(1) + if (!rows[0]) throw new PluginArchRouteFailure(404, "access_grant_not_found", "Access grant not found.") + await db.update(PluginAccessGrantTable).set({ removedAt }).where(eq(PluginAccessGrantTable.id, input.grantId)) + return + } + const rows = await db + .select() + .from(ConnectorInstanceAccessGrantTable) + .where(and(eq(ConnectorInstanceAccessGrantTable.id, input.grantId), eq(ConnectorInstanceAccessGrantTable.connectorInstanceId, input.resourceId))) + .limit(1) + if (!rows[0]) 
throw new PluginArchRouteFailure(404, "access_grant_not_found", "Access grant not found.") + await db.update(ConnectorInstanceAccessGrantTable).set({ removedAt }).where(eq(ConnectorInstanceAccessGrantTable.id, input.grantId)) +} + +export async function listConfigObjects(input: { + connectorInstanceId?: ConnectorInstanceId + context: PluginArchActorContext + cursor?: string + includeDeleted?: boolean + limit?: number + pluginId?: PluginId + q?: string + sourceMode?: ConfigObjectRow["sourceMode"] + status?: ConfigObjectRow["status"] + type?: ConfigObjectRow["objectType"] +}) { + const organizationId = input.context.organizationContext.organization.id + const rows = await db + .select() + .from(ConfigObjectTable) + .where(eq(ConfigObjectTable.organizationId, organizationId)) + .orderBy(desc(ConfigObjectTable.updatedAt), desc(ConfigObjectTable.id)) + + const latestVersions = await getLatestVersions(rows.map((row) => row.id)) + const filtered: ReturnType[] = [] + + for (const row of rows) { + const role = await resolvePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "config_object" }) + if (!role) continue + if (input.type && row.objectType !== input.type) continue + if (input.status && row.status !== input.status) continue + if (input.sourceMode && row.sourceMode !== input.sourceMode) continue + if (!input.includeDeleted && row.status === "deleted") continue + if (input.connectorInstanceId && row.connectorInstanceId !== input.connectorInstanceId) continue + if (input.q) { + const haystack = `${row.title}\n${row.description ?? ""}\n${row.searchText ?? 
""}`.toLowerCase() + if (!haystack.includes(input.q.toLowerCase())) continue + } + if (input.pluginId) { + const memberships = await db + .select({ id: PluginConfigObjectTable.id }) + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.pluginId, input.pluginId), eq(PluginConfigObjectTable.configObjectId, row.id), isNull(PluginConfigObjectTable.removedAt))) + .limit(1) + if (!memberships[0]) continue + } + filtered.push(serializeConfigObject(row, latestVersions.get(row.id) ?? null)) + } + + return pageItems(filtered, input.cursor, input.limit) +} + +export async function getConfigObjectDetail(context: PluginArchActorContext, configObjectId: ConfigObjectId) { + const row = await ensureVisibleConfigObject(context, configObjectId) + const latest = await getLatestVersions([row.id]) + return serializeConfigObject(row, latest.get(row.id) ?? null) +} + +export async function createConfigObject(input: { + context: PluginArchActorContext + objectType: ConfigObjectRow["objectType"] + pluginIds?: PluginId[] + sourceMode: ConfigObjectRow["sourceMode"] + value: ConfigObjectInput +}) { + if (input.sourceMode === "connector") { + throw new PluginArchRouteFailure(400, "invalid_request", "Connector-managed config objects must be created through connector sync.") + } + + for (const pluginId of input.pluginIds ?? 
[]) { + await requirePluginArchResourceRole({ context: input.context, resourceId: pluginId, resourceKind: "plugin", role: "editor" }) + } + + const now = new Date() + const projection = deriveProjection({ objectType: input.objectType, value: input.value }) + const organizationId = input.context.organizationContext.organization.id + const createdByOrgMembershipId = input.context.organizationContext.currentMember.id + const configObjectId = createDenTypeId("configObject") + const versionId = createDenTypeId("configObjectVersion") + + await db.transaction(async (tx) => { + await tx.insert(ConfigObjectTable).values({ + createdAt: now, + createdByOrgMembershipId, + currentFileExtension: null, + currentFileName: null, + currentRelativePath: null, + deletedAt: null, + description: projection.description, + id: configObjectId, + objectType: input.objectType, + organizationId, + searchText: projection.searchText, + sourceMode: input.sourceMode, + status: "active", + title: projection.title, + updatedAt: now, + connectorInstanceId: null, + }) + + await tx.insert(ConfigObjectVersionTable).values({ + configObjectId, + connectorSyncEventId: null, + createdAt: now, + createdByOrgMembershipId, + createdVia: input.sourceMode, + id: versionId, + isDeletedVersion: false, + normalizedPayloadJson: input.value.normalizedPayloadJson ?? null, + organizationId, + rawSourceText: normalizeOptionalString(input.value.rawSourceText), + schemaVersion: normalizeOptionalString(input.value.schemaVersion), + sourceRevisionRef: null, + }) + + await tx.insert(ConfigObjectAccessGrantTable).values({ + configObjectId, + createdAt: now, + createdByOrgMembershipId, + id: createDenTypeId("configObjectAccessGrant"), + organizationId, + orgMembershipId: createdByOrgMembershipId, + orgWide: false, + role: "manager", + teamId: null, + }) + + for (const pluginId of input.pluginIds ?? 
[]) { + const existing = await tx + .select({ id: PluginConfigObjectTable.id }) + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.pluginId, pluginId), eq(PluginConfigObjectTable.configObjectId, configObjectId))) + .limit(1) + + if (existing[0]) { + await tx.update(PluginConfigObjectTable).set({ removedAt: null }).where(eq(PluginConfigObjectTable.id, existing[0].id)) + } else { + await tx.insert(PluginConfigObjectTable).values({ + configObjectId, + connectorMappingId: null, + createdAt: now, + createdByOrgMembershipId, + id: createDenTypeId("pluginConfigObject"), + membershipSource: "manual", + organizationId, + pluginId, + }) + } + } + }) + + return getConfigObjectDetail(input.context, configObjectId) +} + +export async function listConfigObjectVersions(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; cursor?: string; includeDeleted?: boolean; limit?: number }) { + const configObject = await ensureVisibleConfigObject(input.context, input.configObjectId) + const rows = await db + .select() + .from(ConfigObjectVersionTable) + .where(eq(ConfigObjectVersionTable.configObjectId, configObject.id)) + .orderBy(desc(ConfigObjectVersionTable.createdAt), desc(ConfigObjectVersionTable.id)) + + const items = rows + .filter((row) => input.includeDeleted || !row.isDeletedVersion) + .map((row) => ({ ...serializeVersion(row), id: row.id })) + + return pageItems(items, input.cursor, input.limit) +} + +export async function getConfigObjectVersion(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; versionId: ConfigObjectVersionId }) { + await ensureVisibleConfigObject(input.context, input.configObjectId) + const rows = await db + .select() + .from(ConfigObjectVersionTable) + .where(and(eq(ConfigObjectVersionTable.id, input.versionId), eq(ConfigObjectVersionTable.configObjectId, input.configObjectId))) + .limit(1) + if (!rows[0]) { + throw new PluginArchRouteFailure(404, "config_object_version_not_found", "Config 
object version not found.") + } + return serializeVersion(rows[0]) +} + +export async function getLatestConfigObjectVersion(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId }) { + await ensureVisibleConfigObject(input.context, input.configObjectId) + const rows = await db + .select() + .from(ConfigObjectVersionTable) + .where(eq(ConfigObjectVersionTable.configObjectId, input.configObjectId)) + .orderBy(desc(ConfigObjectVersionTable.createdAt), desc(ConfigObjectVersionTable.id)) + .limit(1) + if (!rows[0]) { + throw new PluginArchRouteFailure(404, "config_object_version_not_found", "Config object version not found.") + } + return serializeVersion(rows[0]) +} + +export async function createConfigObjectVersion(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; reason?: string; value: ConfigObjectInput }) { + const row = await getConfigObjectRow(input.context.organizationContext.organization.id, input.configObjectId) + if (!row) { + throw new PluginArchRouteFailure(404, "config_object_not_found", "Config object not found.") + } + await requirePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "config_object", role: "editor" }) + + const now = new Date() + const projection = deriveProjection({ objectType: row.objectType, value: input.value }) + await db.transaction(async (tx) => { + await tx.insert(ConfigObjectVersionTable).values({ + configObjectId: row.id, + connectorSyncEventId: null, + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + createdVia: row.sourceMode === "connector" ? "connector" : row.sourceMode, + id: createDenTypeId("configObjectVersion"), + isDeletedVersion: false, + normalizedPayloadJson: input.value.normalizedPayloadJson ?? 
null, + organizationId: row.organizationId, + rawSourceText: normalizeOptionalString(input.value.rawSourceText), + schemaVersion: normalizeOptionalString(input.value.schemaVersion), + sourceRevisionRef: normalizeOptionalString(input.reason), + }) + + await tx.update(ConfigObjectTable).set({ + description: projection.description, + searchText: projection.searchText, + title: projection.title, + updatedAt: now, + }).where(eq(ConfigObjectTable.id, row.id)) + }) + + return getConfigObjectDetail(input.context, row.id) +} + +export async function setConfigObjectLifecycle(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; action: "archive" | "delete" | "restore" }) { + const row = await getConfigObjectRow(input.context.organizationContext.organization.id, input.configObjectId) + if (!row) { + throw new PluginArchRouteFailure(404, "config_object_not_found", "Config object not found.") + } + await requirePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "config_object", role: "manager" }) + const now = new Date() + const patch = input.action === "archive" + ? { deletedAt: null, status: "archived" as const, updatedAt: now } + : input.action === "delete" + ? 
{ deletedAt: now, status: "deleted" as const, updatedAt: now } + : { deletedAt: null, status: "active" as const, updatedAt: now } + + await db.update(ConfigObjectTable).set(patch).where(eq(ConfigObjectTable.id, row.id)) + return getConfigObjectDetail(input.context, row.id) +} + +export async function listConfigObjectPlugins(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId }) { + const configObject = await ensureVisibleConfigObject(input.context, input.configObjectId) + const latest = await getLatestVersions([configObject.id]) + const memberships = await db + .select() + .from(PluginConfigObjectTable) + .where(eq(PluginConfigObjectTable.configObjectId, configObject.id)) + .orderBy(desc(PluginConfigObjectTable.createdAt)) + + const serializedConfigObject = serializeConfigObject(configObject, latest.get(configObject.id) ?? null) + const visible: ReturnType[] = [] + for (const membership of memberships) { + const pluginRole = await resolvePluginArchResourceRole({ context: input.context, resourceId: membership.pluginId, resourceKind: "plugin" }) + if (!pluginRole) continue + visible.push(serializeMembership(membership, serializedConfigObject)) + } + return { items: visible, nextCursor: null } +} + +export async function attachConfigObjectToPlugin(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; membershipSource?: PluginMembershipRow["membershipSource"]; pluginId: PluginId }) { + await ensureVisibleConfigObject(input.context, input.configObjectId) + await ensureEditablePlugin(input.context, input.pluginId) + + const existing = await db + .select() + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.pluginId, input.pluginId), eq(PluginConfigObjectTable.configObjectId, input.configObjectId))) + .limit(1) + + const now = new Date() + let membershipId = existing[0]?.id ?? null + if (existing[0]) { + await db.update(PluginConfigObjectTable).set({ membershipSource: input.membershipSource ?? 
existing[0].membershipSource, removedAt: null }).where(eq(PluginConfigObjectTable.id, existing[0].id)) + } else { + membershipId = createDenTypeId("pluginConfigObject") + await db.insert(PluginConfigObjectTable).values({ + configObjectId: input.configObjectId, + connectorMappingId: null, + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + id: membershipId, + membershipSource: input.membershipSource ?? "manual", + organizationId: input.context.organizationContext.organization.id, + pluginId: input.pluginId, + }) + } + + const rows = await db.select().from(PluginConfigObjectTable).where(eq(PluginConfigObjectTable.id, membershipId!)).limit(1) + return serializeMembership(rows[0]) +} + +export async function removeConfigObjectFromPlugin(input: { context: PluginArchActorContext; configObjectId: ConfigObjectId; pluginId: PluginId }) { + await ensureVisibleConfigObject(input.context, input.configObjectId) + await ensureEditablePlugin(input.context, input.pluginId) + const rows = await db + .select() + .from(PluginConfigObjectTable) + .where(and(eq(PluginConfigObjectTable.pluginId, input.pluginId), eq(PluginConfigObjectTable.configObjectId, input.configObjectId), isNull(PluginConfigObjectTable.removedAt))) + .limit(1) + if (!rows[0]) { + throw new PluginArchRouteFailure(404, "plugin_membership_not_found", "Plugin membership not found.") + } + await db.update(PluginConfigObjectTable).set({ removedAt: new Date() }).where(eq(PluginConfigObjectTable.id, rows[0].id)) +} + +export async function listResourceAccess(input: { context: PluginArchActorContext } & ResourceTarget) { + await requirePluginArchResourceRole({ context: input.context, resourceId: input.resourceId, resourceKind: input.resourceKind, role: "manager" }) + + if (input.resourceKind === "config_object") { + const rows = await db.select().from(ConfigObjectAccessGrantTable).where(eq(ConfigObjectAccessGrantTable.configObjectId, 
input.resourceId)).orderBy(desc(ConfigObjectAccessGrantTable.createdAt)) + return { items: rows.map((row) => serializeAccessGrant(row)), nextCursor: null } + } + if (input.resourceKind === "marketplace") { + const rows = await db.select().from(MarketplaceAccessGrantTable).where(eq(MarketplaceAccessGrantTable.marketplaceId, input.resourceId)).orderBy(desc(MarketplaceAccessGrantTable.createdAt)) + return { items: rows.map((row) => serializeAccessGrant(row)), nextCursor: null } + } + if (input.resourceKind === "plugin") { + const rows = await db.select().from(PluginAccessGrantTable).where(eq(PluginAccessGrantTable.pluginId, input.resourceId)).orderBy(desc(PluginAccessGrantTable.createdAt)) + return { items: rows.map((row) => serializeAccessGrant(row)), nextCursor: null } + } + const rows = await db.select().from(ConnectorInstanceAccessGrantTable).where(eq(ConnectorInstanceAccessGrantTable.connectorInstanceId, input.resourceId)).orderBy(desc(ConnectorInstanceAccessGrantTable.createdAt)) + return { items: rows.map((row) => serializeAccessGrant(row)), nextCursor: null } +} + +export async function createResourceAccessGrant(input: { context: PluginArchActorContext; value: AccessGrantWrite } & ResourceTarget) { + await requirePluginArchResourceRole({ context: input.context, resourceId: input.resourceId, resourceKind: input.resourceKind, role: "manager" }) + return upsertGrant(input) +} + +export async function deleteResourceAccessGrant(input: { context: PluginArchActorContext } & GrantTarget) { + await requirePluginArchResourceRole({ context: input.context, resourceId: input.resourceId, resourceKind: input.resourceKind, role: "manager" }) + return removeGrant(input) +} + +async function collectPluginMarketplaces(organizationId: PluginRow["organizationId"], pluginIds: PluginId[]): Promise> { + const byPlugin = new Map() + if (pluginIds.length === 0) { + return byPlugin + } + + const rows = await db + .select({ + marketplaceId: MarketplaceTable.id, + marketplaceName: 
MarketplaceTable.name, + pluginId: MarketplacePluginTable.pluginId, + }) + .from(MarketplacePluginTable) + .innerJoin(MarketplaceTable, eq(MarketplacePluginTable.marketplaceId, MarketplaceTable.id)) + .where(and( + eq(MarketplaceTable.organizationId, organizationId), + isNull(MarketplacePluginTable.removedAt), + isNull(MarketplaceTable.deletedAt), + inArray(MarketplacePluginTable.pluginId, pluginIds), + )) + + for (const row of rows) { + const existing = byPlugin.get(row.pluginId) ?? [] + existing.push({ id: row.marketplaceId, name: row.marketplaceName }) + byPlugin.set(row.pluginId, existing) + } + return byPlugin +} + +export async function listPlugins(input: { context: PluginArchActorContext; cursor?: string; limit?: number; q?: string; status?: PluginRow["status"] }) { + const rows = await db + .select() + .from(PluginTable) + .where(eq(PluginTable.organizationId, input.context.organizationContext.organization.id)) + .orderBy(desc(PluginTable.updatedAt), desc(PluginTable.id)) + + const memberships = await db + .select({ pluginId: PluginConfigObjectTable.pluginId, count: PluginConfigObjectTable.id }) + .from(PluginConfigObjectTable) + .where(isNull(PluginConfigObjectTable.removedAt)) + + const counts = memberships.reduce((accumulator, row) => { + accumulator.set(row.pluginId, (accumulator.get(row.pluginId) ?? 0) + 1) + return accumulator + }, new Map()) + + const marketplaceMembers = await collectPluginMarketplaces( + input.context.organizationContext.organization.id, + rows.map((row) => row.id), + ) + + const visible: ReturnType[] = [] + for (const row of rows) { + const role = await resolvePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "plugin" }) + if (!role) continue + if (input.status && row.status !== input.status) continue + if (input.q) { + const haystack = `${row.name}\n${row.description ?? 
""}`.toLowerCase() + if (!haystack.includes(input.q.toLowerCase())) continue + } + visible.push(serializePlugin(row, counts.get(row.id) ?? 0, marketplaceMembers.get(row.id) ?? [])) + } + + return pageItems(visible, input.cursor, input.limit) +} + +export async function getPluginDetail(context: PluginArchActorContext, pluginId: PluginId) { + const row = await ensureVisiblePlugin(context, pluginId) + const memberships = await db.select({ id: PluginConfigObjectTable.id }).from(PluginConfigObjectTable).where(and(eq(PluginConfigObjectTable.pluginId, row.id), isNull(PluginConfigObjectTable.removedAt))) + const marketplaceMembers = await collectPluginMarketplaces(context.organizationContext.organization.id, [row.id]) + return serializePlugin(row, memberships.length, marketplaceMembers.get(row.id) ?? []) +} + +export async function createPlugin(input: { context: PluginArchActorContext; description?: string | null; name: string }) { + const now = new Date() + const row = { + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + deletedAt: null, + description: normalizeOptionalString(input.description ?? 
undefined), + id: createDenTypeId("plugin"), + name: input.name.trim(), + organizationId: input.context.organizationContext.organization.id, + status: "active" as const, + updatedAt: now, + } + + await db.transaction(async (tx) => { + await tx.insert(PluginTable).values(row) + await tx.insert(PluginAccessGrantTable).values({ + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + id: createDenTypeId("pluginAccessGrant"), + organizationId: input.context.organizationContext.organization.id, + orgMembershipId: input.context.organizationContext.currentMember.id, + orgWide: false, + pluginId: row.id, + role: "manager", + teamId: null, + }) + }) + + return serializePlugin(row, 0) +} + +export async function updatePlugin(input: { context: PluginArchActorContext; description?: string | null; name?: string; pluginId: PluginId }) { + const row = await ensureEditablePlugin(input.context, input.pluginId) + const updatedAt = new Date() + await db.update(PluginTable).set({ + description: input.description === undefined ? row.description : normalizeOptionalString(input.description ?? undefined), + name: input.name?.trim() || row.name, + updatedAt, + }).where(eq(PluginTable.id, row.id)) + return getPluginDetail(input.context, row.id) +} + +export async function setPluginLifecycle(input: { action: "archive" | "restore"; context: PluginArchActorContext; pluginId: PluginId }) { + const row = await ensureVisiblePlugin(input.context, input.pluginId) + await requirePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "plugin", role: "manager" }) + const updatedAt = new Date() + await db.update(PluginTable).set({ + deletedAt: input.action === "archive" ? row.deletedAt : null, + status: input.action === "archive" ? 
"archived" : "active", + updatedAt, + }).where(eq(PluginTable.id, row.id)) + return getPluginDetail(input.context, row.id) +} + +export async function listPluginMemberships(input: { context: PluginArchActorContext; pluginId: PluginId; includeConfigObjects?: boolean; onlyActive?: boolean }) { + await ensureVisiblePlugin(input.context, input.pluginId) + const memberships = await db + .select() + .from(PluginConfigObjectTable) + .where(input.onlyActive ? and(eq(PluginConfigObjectTable.pluginId, input.pluginId), isNull(PluginConfigObjectTable.removedAt)) : eq(PluginConfigObjectTable.pluginId, input.pluginId)) + .orderBy(desc(PluginConfigObjectTable.createdAt)) + + if (!input.includeConfigObjects) { + return { items: memberships.map((membership) => serializeMembership(membership)), nextCursor: null } + } + + const configObjects = await db.select().from(ConfigObjectTable).where(inArray(ConfigObjectTable.id, memberships.map((membership) => membership.configObjectId))) + const latestVersions = await getLatestVersions(configObjects.map((row) => row.id)) + const byId = new Map>(configObjects.map((row) => [row.id, serializeConfigObject(row, latestVersions.get(row.id) ?? 
null)])) + return { items: memberships.map((membership) => serializeMembership(membership, byId.get(membership.configObjectId))), nextCursor: null } +} + +export async function addPluginMembership(input: { configObjectId: ConfigObjectId; context: PluginArchActorContext; membershipSource?: PluginMembershipRow["membershipSource"]; pluginId: PluginId }) { + return attachConfigObjectToPlugin({ ...input }) +} + +export async function removePluginMembership(input: { configObjectId: ConfigObjectId; context: PluginArchActorContext; pluginId: PluginId }) { + return removeConfigObjectFromPlugin(input) +} + +export async function listMarketplaces(input: { context: PluginArchActorContext; cursor?: string; limit?: number; q?: string; status?: MarketplaceRow["status"] }) { + const rows = await db + .select() + .from(MarketplaceTable) + .where(eq(MarketplaceTable.organizationId, input.context.organizationContext.organization.id)) + .orderBy(desc(MarketplaceTable.updatedAt), desc(MarketplaceTable.id)) + + const memberships = await db + .select({ marketplaceId: MarketplacePluginTable.marketplaceId, count: MarketplacePluginTable.id }) + .from(MarketplacePluginTable) + .where(isNull(MarketplacePluginTable.removedAt)) + + const counts = memberships.reduce((accumulator, row) => { + accumulator.set(row.marketplaceId, (accumulator.get(row.marketplaceId) ?? 0) + 1) + return accumulator + }, new Map()) + + const visible: ReturnType[] = [] + for (const row of rows) { + const role = await resolvePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "marketplace" }) + if (!role) continue + if (input.status && row.status !== input.status) continue + if (input.q) { + const haystack = `${row.name}\n${row.description ?? ""}`.toLowerCase() + if (!haystack.includes(input.q.toLowerCase())) continue + } + visible.push(serializeMarketplace(row, counts.get(row.id) ?? 
0)) + } + + return pageItems(visible, input.cursor, input.limit) +} + +export async function getMarketplaceDetail(context: PluginArchActorContext, marketplaceId: MarketplaceId) { + const row = await ensureVisibleMarketplace(context, marketplaceId) + const memberships = await db + .select({ id: MarketplacePluginTable.id }) + .from(MarketplacePluginTable) + .where(and(eq(MarketplacePluginTable.marketplaceId, row.id), isNull(MarketplacePluginTable.removedAt))) + return serializeMarketplace(row, memberships.length) +} + +export async function createMarketplace(input: { context: PluginArchActorContext; description?: string | null; name: string }) { + const now = new Date() + const row = { + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + deletedAt: null, + description: normalizeOptionalString(input.description ?? undefined), + id: createDenTypeId("marketplace"), + name: input.name.trim(), + organizationId: input.context.organizationContext.organization.id, + status: "active" as const, + updatedAt: now, + } + + await db.transaction(async (tx) => { + await tx.insert(MarketplaceTable).values(row) + await tx.insert(MarketplaceAccessGrantTable).values({ + createdAt: now, + createdByOrgMembershipId: input.context.organizationContext.currentMember.id, + id: createDenTypeId("marketplaceAccessGrant"), + marketplaceId: row.id, + organizationId: input.context.organizationContext.organization.id, + orgMembershipId: input.context.organizationContext.currentMember.id, + orgWide: false, + role: "manager", + teamId: null, + }) + }) + + return serializeMarketplace(row, 0) +} + +export async function updateMarketplace(input: { context: PluginArchActorContext; description?: string | null; marketplaceId: MarketplaceId; name?: string }) { + const row = await ensureEditableMarketplace(input.context, input.marketplaceId) + const updatedAt = new Date() + await db.update(MarketplaceTable).set({ + description: input.description === undefined ? 
row.description : normalizeOptionalString(input.description ?? undefined), + name: input.name?.trim() || row.name, + updatedAt, + }).where(eq(MarketplaceTable.id, row.id)) + return getMarketplaceDetail(input.context, row.id) +} + +export async function setMarketplaceLifecycle(input: { action: "archive" | "restore"; context: PluginArchActorContext; marketplaceId: MarketplaceId }) { + const row = await ensureVisibleMarketplace(input.context, input.marketplaceId) + await requirePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "marketplace", role: "manager" }) + const updatedAt = new Date() + await db.update(MarketplaceTable).set({ + deletedAt: input.action === "archive" ? row.deletedAt : null, + status: input.action === "archive" ? "archived" : "active", + updatedAt, + }).where(eq(MarketplaceTable.id, row.id)) + return getMarketplaceDetail(input.context, row.id) +} + +export async function listMarketplaceMemberships(input: { context: PluginArchActorContext; includePlugins?: boolean; marketplaceId: MarketplaceId; onlyActive?: boolean }) { + await ensureVisibleMarketplace(input.context, input.marketplaceId) + const memberships = await db + .select() + .from(MarketplacePluginTable) + .where(input.onlyActive ? and(eq(MarketplacePluginTable.marketplaceId, input.marketplaceId), isNull(MarketplacePluginTable.removedAt)) : eq(MarketplacePluginTable.marketplaceId, input.marketplaceId)) + .orderBy(desc(MarketplacePluginTable.createdAt)) + + if (!input.includePlugins) { + return { items: memberships.map((membership) => serializeMarketplaceMembership(membership)), nextCursor: null } + } + + const plugins = memberships.length === 0 + ? 
[]
    : await db.select().from(PluginTable).where(inArray(PluginTable.id, memberships.map((membership) => membership.pluginId)))
  const byId = new Map>(plugins.map((row) => [row.id, serializePlugin(row)]))
  return { items: memberships.map((membership) => serializeMarketplaceMembership(membership, byId.get(membership.pluginId))), nextCursor: null }
}

/**
 * Resolved "source" info for a marketplace: the single connector instance
 * (and repository/branch, where present in the target config) its plugins
 * were imported from, or null when the origin is absent or ambiguous.
 */
export type MarketplaceResolvedSource = {
  connectorAccountId: string
  connectorInstanceId: string
  accountLogin: string | null
  repositoryFullName: string
  branch: string | null
} | null

/**
 * Returns a marketplace together with its active plugins (each annotated with
 * per-object-type component counts) and, when unambiguous, the connector
 * source the plugins were imported from.
 */
export async function getMarketplaceResolved(input: { context: PluginArchActorContext; marketplaceId: MarketplaceId }) {
  const marketplaceRow = await ensureVisibleMarketplace(input.context, input.marketplaceId)
  const organizationId = input.context.organizationContext.organization.id

  // Active (not soft-removed) plugin memberships, newest first.
  const memberships = await db
    .select()
    .from(MarketplacePluginTable)
    .where(and(eq(MarketplacePluginTable.marketplaceId, marketplaceRow.id), isNull(MarketplacePluginTable.removedAt)))
    .orderBy(desc(MarketplacePluginTable.createdAt))

  const pluginIds = memberships.map((membership) => membership.pluginId)
  const pluginRows = pluginIds.length === 0
    ? []
    : await db.select().from(PluginTable).where(inArray(PluginTable.id, pluginIds))

  // Active config-object memberships per plugin; feed both the member counts
  // and the per-object-type component counts below.
  const activePluginMemberships = pluginIds.length === 0
    ? []
    : await db
      .select({ pluginId: PluginConfigObjectTable.pluginId, configObjectId: PluginConfigObjectTable.configObjectId })
      .from(PluginConfigObjectTable)
      .where(and(inArray(PluginConfigObjectTable.pluginId, pluginIds), isNull(PluginConfigObjectTable.removedAt)))
  const memberCounts = new Map()
  for (const entry of activePluginMemberships) {
    memberCounts.set(entry.pluginId, (memberCounts.get(entry.pluginId) ?? 0) + 1)
  }

  // Look up each referenced config object's type so components can be bucketed.
  const configObjectIds = [...new Set(activePluginMemberships.map((entry) => entry.configObjectId))]
  const configObjectTypeById = new Map()
  if (configObjectIds.length > 0) {
    const rows = await db
      .select({ id: ConfigObjectTable.id, objectType: ConfigObjectTable.objectType })
      .from(ConfigObjectTable)
      .where(inArray(ConfigObjectTable.id, configObjectIds))
    for (const row of rows) {
      configObjectTypeById.set(row.id, row.objectType)
    }
  }

  const componentCountsByPlugin = new Map>()
  for (const entry of activePluginMemberships) {
    const objectType = configObjectTypeById.get(entry.configObjectId)
    if (!objectType) continue
    let counts = componentCountsByPlugin.get(entry.pluginId)
    if (!counts) {
      counts = new Map()
      componentCountsByPlugin.set(entry.pluginId, counts)
    }
    counts.set(objectType, (counts.get(objectType) ?? 0) + 1)
  }

  const plugins = pluginRows.map((row) => ({
    ...serializePlugin(row, memberCounts.get(row.id) ?? 0),
    componentCounts: Object.fromEntries(componentCountsByPlugin.get(row.id) ?? new Map()),
  }))

  // Resolve the connector source only when the mapped plugins point at
  // exactly one connector instance — otherwise the origin stays null.
  let source: MarketplaceResolvedSource = null
  if (pluginIds.length > 0) {
    const mappingRows = await db
      .selectDistinct({ connectorInstanceId: ConnectorMappingTable.connectorInstanceId })
      .from(ConnectorMappingTable)
      .where(and(
        eq(ConnectorMappingTable.organizationId, organizationId),
        inArray(ConnectorMappingTable.pluginId, pluginIds),
      ))
    const connectorInstanceIds = mappingRows.map((entry) => entry.connectorInstanceId)
    if (connectorInstanceIds.length === 1) {
      const [instance] = await db
        .select()
        .from(ConnectorInstanceTable)
        .where(eq(ConnectorInstanceTable.id, connectorInstanceIds[0]))
        .limit(1)
      if (instance) {
        const [account] = await db
          .select()
          .from(ConnectorAccountTable)
          .where(eq(ConnectorAccountTable.id, instance.connectorAccountId))
          .limit(1)
        // Oldest target (stable tie-break on id) represents the instance.
        const [target] = await db
          .select()
          .from(ConnectorTargetTable)
          .where(eq(ConnectorTargetTable.connectorInstanceId, instance.id))
          .orderBy(asc(ConnectorTargetTable.createdAt), asc(ConnectorTargetTable.id))
          .limit(1)
        const targetConfig = target?.targetConfigJson && typeof target.targetConfigJson === "object"
          ? target.targetConfigJson as Record
          : {}
        // Prefer the explicit repositoryFullName from the target config; fall
        // back to the instance's remoteId (empty string when neither exists).
        const repositoryFullName = typeof targetConfig.repositoryFullName === "string"
          ? targetConfig.repositoryFullName
          : instance.remoteId ?? ""
        source = {
          connectorAccountId: instance.connectorAccountId,
          connectorInstanceId: instance.id,
          // externalAccountRef wins; otherwise try metadataJson.accountLogin.
          accountLogin: account?.externalAccountRef ?? (account?.metadataJson && typeof account.metadataJson === "object" ? (account.metadataJson as Record).accountLogin as string ?? null : null),
          repositoryFullName,
          branch: typeof targetConfig.branch === "string" ? targetConfig.branch : target?.externalTargetRef ?? null,
        }
      }
    }
  }

  return {
    marketplace: serializeMarketplace(marketplaceRow, plugins.length),
    plugins,
    source,
  }
}

/**
 * Attaches a plugin to a marketplace, reviving a previously removed
 * membership when one exists (clearing `removedAt`) instead of inserting a
 * duplicate row. Returns the persisted, serialized membership.
 */
export async function attachPluginToMarketplace(input: { context: PluginArchActorContext; marketplaceId: MarketplaceId; membershipSource?: MarketplaceMembershipRow["membershipSource"]; pluginId: PluginId }) {
  await ensureVisiblePlugin(input.context, input.pluginId)
  await ensureEditableMarketplace(input.context, input.marketplaceId)

  const existing = await db
    .select()
    .from(MarketplacePluginTable)
    .where(and(eq(MarketplacePluginTable.marketplaceId, input.marketplaceId), eq(MarketplacePluginTable.pluginId, input.pluginId)))
    .limit(1)

  const now = new Date()
  let membershipId: MarketplaceMembershipId | null = existing[0]?.id ?? null
  if (existing[0]) {
    // Revive: clear removedAt; membershipSource is only overridden when given.
    await db.update(MarketplacePluginTable).set({ membershipSource: input.membershipSource ?? existing[0].membershipSource, removedAt: null }).where(eq(MarketplacePluginTable.id, existing[0].id))
  } else {
    membershipId = createDenTypeId("marketplacePlugin")
    await db.insert(MarketplacePluginTable).values({
      createdAt: now,
      createdByOrgMembershipId: input.context.organizationContext.currentMember.id,
      id: membershipId,
      marketplaceId: input.marketplaceId,
      membershipSource: input.membershipSource ??
"manual",
      organizationId: input.context.organizationContext.organization.id,
      pluginId: input.pluginId,
    })
  }

  // Re-read the membership row so the caller gets the persisted state.
  const persisted = await db.select().from(MarketplacePluginTable).where(eq(MarketplacePluginTable.id, membershipId!)).limit(1)
  return serializeMarketplaceMembership(persisted[0])
}

/**
 * Soft-removes a plugin from a marketplace by stamping `removedAt` on the
 * active membership. Throws a 404 `PluginArchRouteFailure` when no active
 * (non-removed) membership exists.
 */
export async function removePluginFromMarketplace(input: { context: PluginArchActorContext; marketplaceId: MarketplaceId; pluginId: PluginId }) {
  await ensureVisiblePlugin(input.context, input.pluginId)
  await ensureEditableMarketplace(input.context, input.marketplaceId)
  const [membership] = await db
    .select()
    .from(MarketplacePluginTable)
    .where(and(eq(MarketplacePluginTable.marketplaceId, input.marketplaceId), eq(MarketplacePluginTable.pluginId, input.pluginId), isNull(MarketplacePluginTable.removedAt)))
    .limit(1)
  if (!membership) {
    throw new PluginArchRouteFailure(404, "marketplace_membership_not_found", "Marketplace membership not found.")
  }
  await db.update(MarketplacePluginTable).set({ removedAt: new Date() }).where(eq(MarketplacePluginTable.id, membership.id))
}

/**
 * Lists connector accounts for the caller's organization, newest-updated
 * first, with optional in-memory filtering by type, status, and free-text
 * query; results are cursor-paged.
 */
export async function listConnectorAccounts(input: { context: PluginArchActorContext; connectorType?: ConnectorAccountRow["connectorType"]; cursor?: string; limit?: number; q?: string; status?: ConnectorAccountRow["status"] }) {
  const accountRows = await db
    .select()
    .from(ConnectorAccountTable)
    .where(eq(ConnectorAccountTable.organizationId, input.context.organizationContext.organization.id))
    .orderBy(desc(ConnectorAccountTable.updatedAt), desc(ConnectorAccountTable.id))

  const filtered = accountRows
    .filter((row) => !input.connectorType || row.connectorType === input.connectorType)
    .filter((row) => !input.status || row.status === input.status)
    .filter((row) => !input.q || `${row.displayName}\n${row.remoteId}\n${row.externalAccountRef ??
""}`.toLowerCase().includes(input.q.toLowerCase()))
    .map((row) => serializeConnectorAccount(row, resolveCreatorName(input.context, row.createdByOrgMembershipId)))

  return pageItems(filtered, input.cursor, input.limit)
}

/**
 * Creates a connector account owned by the caller's organization and returns
 * it serialized. displayName/remoteId are trimmed; externalAccountRef is
 * normalized via normalizeOptionalString.
 */
export async function createConnectorAccount(input: { context: PluginArchActorContext; connectorType: ConnectorAccountRow["connectorType"]; displayName: string; externalAccountRef?: string | null; metadata?: Record; remoteId: string }) {
  const now = new Date()
  const row = {
    connectorType: input.connectorType,
    createdAt: now,
    createdByOrgMembershipId: input.context.organizationContext.currentMember.id,
    displayName: input.displayName.trim(),
    externalAccountRef: normalizeOptionalString(input.externalAccountRef ?? undefined),
    id: createDenTypeId("connectorAccount"),
    metadataJson: input.metadata ?? null,
    organizationId: input.context.organizationContext.organization.id,
    remoteId: input.remoteId.trim(),
    status: "active" as const,
    updatedAt: now,
  }
  await db.insert(ConnectorAccountTable).values(row)
  return serializeConnectorAccount(row)
}

/** Fetches one connector account in the caller's org or throws a 404. */
export async function getConnectorAccountDetail(context: PluginArchActorContext, connectorAccountId: ConnectorAccountId) {
  const row = await getConnectorAccountRow(context.organizationContext.organization.id, connectorAccountId)
  if (!row) {
    throw new PluginArchRouteFailure(404, "connector_account_not_found", "Connector account not found.")
  }
  return serializeConnectorAccount(row, resolveCreatorName(context, row.createdByOrgMembershipId))
}

/**
 * Disconnects a connector account: hard-deletes all of its instances,
 * targets, mappings, imported config objects, and access grants in one
 * transaction, cascades removal of connector-imported plugins/marketplaces
 * left without content, and finally deletes the account row itself.
 * Returns counts of the deleted rows.
 */
export async function disconnectConnectorAccount(input: { connectorAccountId: ConnectorAccountId; context: PluginArchActorContext; reason?: string }) {
  const organizationId = input.context.organizationContext.organization.id
  const row = await getConnectorAccountRow(organizationId, input.connectorAccountId)
  if (!row) {
    throw new PluginArchRouteFailure(404, "connector_account_not_found", "Connector account not found.")
  }

  // Gather ids up front (outside the transaction) so the deletes below can
  // run as simple inArray batches.
  const instances = await db
    .select({ id: ConnectorInstanceTable.id })
    .from(ConnectorInstanceTable)
    .where(and(
      eq(ConnectorInstanceTable.organizationId, organizationId),
      eq(ConnectorInstanceTable.connectorAccountId, row.id),
    ))
  const instanceIds = instances.map((entry) => entry.id)

  const mappingRows = instanceIds.length === 0
    ? []
    : await db
      .select({ id: ConnectorMappingTable.id, pluginId: ConnectorMappingTable.pluginId })
      .from(ConnectorMappingTable)
      .where(inArray(ConnectorMappingTable.connectorInstanceId, instanceIds))
  const mappingIds = mappingRows.map((entry) => entry.id)
  const connectorPluginIds = [...new Set(mappingRows.map((entry) => entry.pluginId).filter((value): value is PluginId => Boolean(value)))]

  const configObjectRows = instanceIds.length === 0
    ? []
    : await db
      .select({ id: ConfigObjectTable.id })
      .from(ConfigObjectTable)
      .where(inArray(ConfigObjectTable.connectorInstanceId, instanceIds))
  const configObjectIds = configObjectRows.map((entry) => entry.id)

  // Deletion order matters: child rows (memberships, grants, versions) are
  // removed before their parents to keep referential integrity inside the tx.
  await db.transaction(async (tx) => {
    if (instanceIds.length > 0) {
      await tx.delete(ConnectorSourceTombstoneTable).where(inArray(ConnectorSourceTombstoneTable.connectorInstanceId, instanceIds))
      await tx.delete(ConnectorSourceBindingTable).where(inArray(ConnectorSourceBindingTable.connectorInstanceId, instanceIds))
      await tx.delete(ConnectorSyncEventTable).where(inArray(ConnectorSyncEventTable.connectorInstanceId, instanceIds))
    }

    if (configObjectIds.length > 0) {
      await tx.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.configObjectId, configObjectIds))
      await tx.delete(ConfigObjectAccessGrantTable).where(inArray(ConfigObjectAccessGrantTable.configObjectId, configObjectIds))
      await tx.delete(ConfigObjectVersionTable).where(inArray(ConfigObjectVersionTable.configObjectId, configObjectIds))
      await tx.delete(ConfigObjectTable).where(inArray(ConfigObjectTable.id, configObjectIds))
    }

    if (mappingIds.length > 0) {
      await tx.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.connectorMappingId, mappingIds))
      await tx.delete(ConnectorMappingTable).where(inArray(ConnectorMappingTable.id, mappingIds))
    }

    if (instanceIds.length > 0) {
      await tx.delete(ConnectorTargetTable).where(inArray(ConnectorTargetTable.connectorInstanceId, instanceIds))
      await tx.delete(ConnectorInstanceAccessGrantTable).where(inArray(ConnectorInstanceAccessGrantTable.connectorInstanceId, instanceIds))
      await tx.delete(ConnectorInstanceTable).where(inArray(ConnectorInstanceTable.id, instanceIds))
    }

    // Cascade: drop connector-imported plugins/marketplaces left empty.
    await cleanupConnectorImportedResources({ seedPluginIds: connectorPluginIds, tx })

    await tx.delete(ConnectorAccountTable).where(eq(ConnectorAccountTable.id, row.id))
  })

  return {
    deletedConfigObjectCount: configObjectIds.length,
    deletedConnectorInstanceCount: instanceIds.length,
    deletedConnectorMappingCount: mappingIds.length,
    disconnectedAccountId: row.id,
    reason: input.reason ?? null,
  }
}

/**
 * Lists connector instances the caller can see, filtered in memory.
 * NOTE(review): role resolution and the optional pluginId check issue one
 * query per row (N+1); revisit if instance counts grow.
 */
export async function listConnectorInstances(input: { connectorAccountId?: ConnectorAccountId; context: PluginArchActorContext; cursor?: string; limit?: number; pluginId?: PluginId; q?: string; status?: ConnectorInstanceRow["status"] }) {
  const rows = await db
    .select()
    .from(ConnectorInstanceTable)
    .where(eq(ConnectorInstanceTable.organizationId, input.context.organizationContext.organization.id))
    .orderBy(desc(ConnectorInstanceTable.updatedAt), desc(ConnectorInstanceTable.id))

  const filtered: ReturnType[] = []
  for (const row of rows) {
    // Skip rows the caller holds no role on.
    const role = await resolvePluginArchResourceRole({ context: input.context, resourceId: row.id, resourceKind: "connector_instance" })
    if (!role) continue
    if (input.connectorAccountId && row.connectorAccountId !== input.connectorAccountId) continue
    if (input.status && row.status !== input.status) continue
    if (input.q && !`${row.name}\n${row.remoteId ??
""}`.toLowerCase().includes(input.q.toLowerCase())) continue
    if (input.pluginId) {
      // Keep only instances that have at least one mapping to the plugin.
      const mappings = await db
        .select({ id: ConnectorMappingTable.id })
        .from(ConnectorMappingTable)
        .where(and(eq(ConnectorMappingTable.connectorInstanceId, row.id), eq(ConnectorMappingTable.pluginId, input.pluginId)))
        .limit(1)
      if (!mappings[0]) continue
    }
    filtered.push(serializeConnectorInstance(row))
  }

  return pageItems(filtered, input.cursor, input.limit)
}

/**
 * Creates a connector instance under an existing account and, in the same
 * transaction, grants the creator the "manager" role on it.
 */
export async function createConnectorInstance(input: { connectorAccountId: ConnectorAccountId; connectorType: ConnectorInstanceRow["connectorType"]; config?: Record; context: PluginArchActorContext; name: string; remoteId?: string | null }) {
  const account = await getConnectorAccountRow(input.context.organizationContext.organization.id, input.connectorAccountId)
  if (!account) {
    throw new PluginArchRouteFailure(404, "connector_account_not_found", "Connector account not found.")
  }
  const now = new Date()
  const row = {
    connectorAccountId: account.id,
    connectorType: input.connectorType,
    createdAt: now,
    createdByOrgMembershipId: input.context.organizationContext.currentMember.id,
    id: createDenTypeId("connectorInstance"),
    instanceConfigJson: input.config ?? null,
    lastSyncCursor: null,
    lastSyncStatus: null,
    lastSyncedAt: null,
    name: input.name.trim(),
    organizationId: input.context.organizationContext.organization.id,
    remoteId: normalizeOptionalString(input.remoteId ?? undefined),
    status: "active" as const,
    updatedAt: now,
  }
  await db.transaction(async (tx) => {
    await tx.insert(ConnectorInstanceTable).values(row)
    // Creator gets a direct (non org-wide, non-team) manager grant.
    await tx.insert(ConnectorInstanceAccessGrantTable).values({
      connectorInstanceId: row.id,
      createdAt: now,
      createdByOrgMembershipId: input.context.organizationContext.currentMember.id,
      id: createDenTypeId("connectorInstanceAccessGrant"),
      organizationId: input.context.organizationContext.organization.id,
      orgMembershipId: input.context.organizationContext.currentMember.id,
      orgWide: false,
      role: "manager",
      teamId: null,
    })
  })
  return serializeConnectorInstance(row)
}

/** Fetches one connector instance visible to the caller, serialized. */
export async function getConnectorInstanceDetail(context: PluginArchActorContext, connectorInstanceId: ConnectorInstanceId) {
  const row = await ensureVisibleConnectorInstance(context, connectorInstanceId)
  return serializeConnectorInstance(row)
}

/**
 * Patches mutable connector-instance fields; undefined inputs keep current
 * values. Note `input.name?.trim() || row.name` also falls back when the
 * trimmed name is empty.
 */
export async function updateConnectorInstance(input: { connectorInstanceId: ConnectorInstanceId; config?: Record; context: PluginArchActorContext; name?: string; remoteId?: string | null; status?: ConnectorInstanceRow["status"] }) {
  const row = await ensureEditableConnectorInstance(input.context, input.connectorInstanceId)
  await db.update(ConnectorInstanceTable).set({
    instanceConfigJson: input.config === undefined ? row.instanceConfigJson : input.config,
    name: input.name?.trim() || row.name,
    remoteId: input.remoteId === undefined ? row.remoteId : normalizeOptionalString(input.remoteId ?? undefined),
    status: input.status ??
row.status,
    updatedAt: new Date(),
  }).where(eq(ConnectorInstanceTable.id, row.id))
  return getConnectorInstanceDetail(input.context, row.id)
}

/** Maps a lifecycle action onto the instance's status column and returns the refreshed detail. */
export async function setConnectorInstanceLifecycle(input: { action: "archive" | "disable" | "enable"; connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) {
  const row = await ensureEditableConnectorInstance(input.context, input.connectorInstanceId)
  const status = input.action === "archive" ? "archived" : input.action === "disable" ? "disabled" : "active"
  await db.update(ConnectorInstanceTable).set({ status, updatedAt: new Date() }).where(eq(ConnectorInstanceTable.id, row.id))
  return getConnectorInstanceDetail(input.context, row.id)
}

/**
 * Derives the directory all selectors share, after stripping a trailing
 * "/**" glob and any known component leaf segment (skills, commands, ...).
 * Returns null for an empty selector list, "" when the common root is the
 * repository root, otherwise the shared path prefix (no leading/trailing /).
 */
function commonSelectorRootPath(selectors: string[]): string | null {
  const normalized = selectors
    .map((selector) => {
      // Trim surrounding slashes, then drop a single trailing "/**" glob.
      let path = selector.trim().replace(/^\/+/, "").replace(/\/+$/, "")
      if (path.endsWith("/**")) {
        path = path.slice(0, -3)
      }
      // A selector that IS a leaf (or ends in one) contributes its parent dir.
      const knownLeafSegments = ["skills", "commands", "agents", "hooks", "monitors", "mcp", ".mcp.json", ".lsp.json", "settings.json", "hooks.json"]
      for (const leaf of knownLeafSegments) {
        if (path === leaf) return ""
        if (path.endsWith(`/${leaf}`)) return path.slice(0, -(leaf.length + 1))
      }
      return path
    })
    // NOTE(review): the map above never yields null, so this filter is a
    // no-op type guard kept for safety.
    .filter((path): path is string => path !== null)

  if (normalized.length === 0) return null
  if (normalized.every((path) => path === normalized[0])) {
    return normalized[0]
  }

  // Walk prefixes of the first path from longest to shortest and take the
  // first one every other path sits under; "" when nothing is shared.
  const parts = normalized[0].split("/")
  for (let index = parts.length; index > 0; index -= 1) {
    const candidate = parts.slice(0, index).join("/")
    if (normalized.every((path) => path === candidate || path.startsWith(`${candidate}/`))) {
      return candidate
    }
  }
  return ""
}

/**
 * Post-delete sanity check: verifies no plugin/marketplace rows (or their
 * child mappings, memberships, grants) survived the cleanup; throwing here
 * rolls back the caller's transaction.
 */
async function assertConnectorImportedResourceCleanup(input: {
  marketplaceIdsToDelete: MarketplaceId[]
  pluginIdsToDelete: PluginId[]
  tx: DbTransaction
}) {
  if (input.pluginIdsToDelete.length > 0) {
    const
[remainingPlugins, remainingPluginMappings, remainingPluginMemberships, remainingPluginGrants] = await Promise.all([
      input.tx.select({ id: PluginTable.id }).from(PluginTable).where(inArray(PluginTable.id, input.pluginIdsToDelete)),
      input.tx.select({ id: ConnectorMappingTable.id }).from(ConnectorMappingTable).where(inArray(ConnectorMappingTable.pluginId, input.pluginIdsToDelete)),
      input.tx.select({ id: PluginConfigObjectTable.id }).from(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.pluginId, input.pluginIdsToDelete)),
      input.tx.select({ id: PluginAccessGrantTable.id }).from(PluginAccessGrantTable).where(inArray(PluginAccessGrantTable.pluginId, input.pluginIdsToDelete)),
    ])

    if (remainingPlugins.length > 0 || remainingPluginMappings.length > 0 || remainingPluginMemberships.length > 0 || remainingPluginGrants.length > 0) {
      throw new Error("Connector cleanup left plugin records behind.")
    }
  }

  if (input.marketplaceIdsToDelete.length > 0) {
    const [remainingMarketplaces, remainingMarketplaceMemberships, remainingMarketplaceGrants] = await Promise.all([
      input.tx.select({ id: MarketplaceTable.id }).from(MarketplaceTable).where(inArray(MarketplaceTable.id, input.marketplaceIdsToDelete)),
      input.tx.select({ id: MarketplacePluginTable.id }).from(MarketplacePluginTable).where(inArray(MarketplacePluginTable.marketplaceId, input.marketplaceIdsToDelete)),
      input.tx.select({ id: MarketplaceAccessGrantTable.id }).from(MarketplaceAccessGrantTable).where(inArray(MarketplaceAccessGrantTable.marketplaceId, input.marketplaceIdsToDelete)),
    ])

    if (remainingMarketplaces.length > 0 || remainingMarketplaceMemberships.length > 0 || remainingMarketplaceGrants.length > 0) {
      throw new Error("Connector cleanup left marketplace records behind.")
    }
  }
}

/**
 * Given plugins that just lost connector content (`seedPluginIds`), finds
 * connector-sourced marketplaces/plugins that are now empty and hard-deletes
 * them (memberships and grants first, then the rows themselves). Runs inside
 * the caller's transaction and re-verifies the deletes before returning
 * { deletedMarketplaceCount, deletedPluginCount }.
 */
async function cleanupConnectorImportedResources(input: {
  seedPluginIds: PluginId[]
  tx: DbTransaction
}) {
  const seedPluginIds = uniqueIds(input.seedPluginIds)
  if (seedPluginIds.length === 0) {
    return { deletedMarketplaceCount: 0, deletedPluginCount: 0 }
  }

  // Marketplaces the seed plugins were attached to via connector import.
  const connectorMarketplaceRows = await input.tx
    .select({ marketplaceId: MarketplacePluginTable.marketplaceId })
    .from(MarketplacePluginTable)
    .where(and(
      inArray(MarketplacePluginTable.pluginId, seedPluginIds),
      eq(MarketplacePluginTable.membershipSource, "connector"),
      isNull(MarketplacePluginTable.removedAt),
    ))
  const candidateMarketplaceIds = uniqueIds(connectorMarketplaceRows.map((row) => row.marketplaceId))

  // All remaining active memberships of those marketplaces (any source).
  const activeMarketplaceMemberships = candidateMarketplaceIds.length === 0
    ? []
    : await input.tx
      .select({
        marketplaceId: MarketplacePluginTable.marketplaceId,
        membershipSource: MarketplacePluginTable.membershipSource,
        pluginId: MarketplacePluginTable.pluginId,
      })
      .from(MarketplacePluginTable)
      .where(and(
        inArray(MarketplacePluginTable.marketplaceId, candidateMarketplaceIds),
        isNull(MarketplacePluginTable.removedAt),
      ))

  // Widen the plugin candidate set with sibling connector-sourced plugins.
  const candidatePluginIds = uniqueIds([
    ...seedPluginIds,
    ...activeMarketplaceMemberships
      .filter((membership) => membership.membershipSource === "connector")
      .map((membership) => membership.pluginId),
  ])

  const activePluginMembershipRows = candidatePluginIds.length === 0
    ? []
    : await input.tx
      .select({ pluginId: PluginConfigObjectTable.pluginId })
      .from(PluginConfigObjectTable)
      .where(and(
        inArray(PluginConfigObjectTable.pluginId, candidatePluginIds),
        isNull(PluginConfigObjectTable.removedAt),
      ))

  const activeMappingRows = candidatePluginIds.length === 0
    ? []
    : await input.tx
      .select({ pluginId: ConnectorMappingTable.pluginId })
      .from(ConnectorMappingTable)
      .where(inArray(ConnectorMappingTable.pluginId, candidatePluginIds))

  // Planning step decides which ids are actually safe to delete.
  const { marketplaceIdsToDelete, pluginIdsToDelete } = planConnectorImportedResourceCleanup({
    activeMarketplaceMemberships,
    activeMappingPluginIds: activeMappingRows
      .map((row) => row.pluginId)
      .filter((pluginId): pluginId is PluginId => Boolean(pluginId)),
    activePluginMembershipPluginIds: activePluginMembershipRows.map((row) => row.pluginId),
    candidateMarketplaceIds,
    candidatePluginIds,
  })

  // Child rows (memberships, grants) before parent rows.
  if (pluginIdsToDelete.length > 0) {
    await input.tx.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.pluginId, pluginIdsToDelete))
    await input.tx.delete(MarketplacePluginTable).where(inArray(MarketplacePluginTable.pluginId, pluginIdsToDelete))
    await input.tx.delete(PluginAccessGrantTable).where(inArray(PluginAccessGrantTable.pluginId, pluginIdsToDelete))
    await input.tx.delete(PluginTable).where(inArray(PluginTable.id, pluginIdsToDelete))
  }

  if (marketplaceIdsToDelete.length > 0) {
    await input.tx.delete(MarketplacePluginTable).where(inArray(MarketplacePluginTable.marketplaceId, marketplaceIdsToDelete))
    await input.tx.delete(MarketplaceAccessGrantTable).where(inArray(MarketplaceAccessGrantTable.marketplaceId, marketplaceIdsToDelete))
    await input.tx.delete(MarketplaceTable).where(inArray(MarketplaceTable.id, marketplaceIdsToDelete))
  }

  await assertConnectorImportedResourceCleanup({
    marketplaceIdsToDelete,
    pluginIdsToDelete,
    tx: input.tx,
  })

  return {
    deletedMarketplaceCount: marketplaceIdsToDelete.length,
    deletedPluginCount: pluginIdsToDelete.length,
  }
}

/**
 * Summarizes what a connector instance has imported: configured plugins
 * (with component counts and resolved selector root paths), mapping and
 * config-object counts, plus the auto-import flag.
 */
export async function getConnectorInstanceConfiguration(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) {
  const instance = await ensureVisibleConnectorInstance(input.context, input.connectorInstanceId)
  const mappings = await db
.select()
    .from(ConnectorMappingTable)
    .where(eq(ConnectorMappingTable.connectorInstanceId, instance.id))
    .orderBy(desc(ConnectorMappingTable.createdAt), desc(ConnectorMappingTable.id))

  const pluginIds = [...new Set(mappings.map((row) => row.pluginId).filter((value): value is PluginId => Boolean(value)))]
  const pluginRows = pluginIds.length === 0
    ? []
    : await db.select().from(PluginTable).where(inArray(PluginTable.id, pluginIds))
  const memberships = pluginIds.length === 0
    ? []
    : await db
      .select({ pluginId: PluginConfigObjectTable.pluginId, configObjectId: PluginConfigObjectTable.configObjectId })
      .from(PluginConfigObjectTable)
      .where(and(inArray(PluginConfigObjectTable.pluginId, pluginIds), isNull(PluginConfigObjectTable.removedAt)))
  // Resolve each referenced config object's type for the component buckets.
  const configObjectIds = [...new Set(memberships.map((entry) => entry.configObjectId))]
  const configObjectTypeById = new Map()
  if (configObjectIds.length > 0) {
    const rows = await db
      .select({ id: ConfigObjectTable.id, objectType: ConfigObjectTable.objectType })
      .from(ConfigObjectTable)
      .where(inArray(ConfigObjectTable.id, configObjectIds))
    for (const row of rows) {
      configObjectTypeById.set(row.id, row.objectType)
    }
  }

  // Tally memberships per plugin and bucket them by config-object type.
  const pluginComponentCounts = new Map>()
  const membershipCounts = new Map()
  for (const membership of memberships) {
    membershipCounts.set(membership.pluginId, (membershipCounts.get(membership.pluginId) ?? 0) + 1)
    const objectType = configObjectTypeById.get(membership.configObjectId)
    if (!objectType) continue
    let counts = pluginComponentCounts.get(membership.pluginId)
    if (!counts) {
      counts = new Map()
      pluginComponentCounts.set(membership.pluginId, counts)
    }
    counts.set(objectType, (counts.get(objectType) ?? 0) + 1)
  }

  // A plugin's root path is the common prefix of its mapping selectors.
  const pluginRootPaths = new Map()
  for (const pluginId of pluginIds) {
    const selectors = mappings
      .filter((mapping) => mapping.pluginId === pluginId)
      .map((mapping) => mapping.selector)
    pluginRootPaths.set(pluginId, commonSelectorRootPath(selectors))
  }

  const configObjectRows = await db
    .select({ id: ConfigObjectTable.id })
    .from(ConfigObjectTable)
    .where(eq(ConfigObjectTable.connectorInstanceId, instance.id))

  const instanceConfig = instance.instanceConfigJson && typeof instance.instanceConfigJson === "object"
    ? instance.instanceConfigJson as Record
    : {}
  const savedAutoImport = instanceConfig.autoImportNewPlugins

  return {
    // Auto-import defaults to enabled when the flag was never saved.
    autoImportNewPlugins: typeof savedAutoImport === "boolean" ? savedAutoImport : true,
    configuredPlugins: pluginRows.map((row) => ({
      ...serializePlugin(row, membershipCounts.get(row.id) ?? 0),
      componentCounts: Object.fromEntries(pluginComponentCounts.get(row.id) ?? new Map()),
      rootPath: pluginRootPaths.get(row.id) ?? null,
    })),
    connectorInstance: serializeConnectorInstance(instance),
    importedConfigObjectCount: configObjectRows.length,
    mappingCount: mappings.length,
  }
}

/**
 * Persists the auto-import flag by merging it into instanceConfigJson
 * (preserving other keys) and returns the refreshed configuration summary.
 */
export async function setConnectorInstanceAutoImport(input: { autoImportNewPlugins: boolean; connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) {
  const instance = await ensureEditableConnectorInstance(input.context, input.connectorInstanceId)
  const currentConfig = instance.instanceConfigJson && typeof instance.instanceConfigJson === "object"
    ? instance.instanceConfigJson as Record
    : {}
  await db.update(ConnectorInstanceTable).set({
    instanceConfigJson: {
      ...currentConfig,
      autoImportNewPlugins: input.autoImportNewPlugins,
    },
    updatedAt: new Date(),
  }).where(eq(ConnectorInstanceTable.id, instance.id))

  return getConnectorInstanceConfiguration({ connectorInstanceId: instance.id, context: input.context })
}

/**
 * Hard-deletes a connector instance with all of its sync state, targets,
 * mappings, and imported config objects in one transaction, then cascades
 * cleanup to connector-imported plugins left without content.
 * Returns counts of the deleted rows.
 */
export async function removeConnectorInstance(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) {
  const instance = await ensureEditableConnectorInstance(input.context, input.connectorInstanceId)

  const mappingRows = await db
    .select({ id: ConnectorMappingTable.id, pluginId: ConnectorMappingTable.pluginId })
    .from(ConnectorMappingTable)
    .where(eq(ConnectorMappingTable.connectorInstanceId, instance.id))
  const mappingIds = mappingRows.map((entry) => entry.id)
  const pluginIds = [...new Set(mappingRows.map((entry) => entry.pluginId).filter((value): value is PluginId => Boolean(value)))]

  const configObjectRows = await db
    .select({ id: ConfigObjectTable.id })
    .from(ConfigObjectTable)
    .where(eq(ConfigObjectTable.connectorInstanceId, instance.id))
  const configObjectIds = configObjectRows.map((entry) => entry.id)

  // Children before parents — same ordering as disconnectConnectorAccount.
  await db.transaction(async (tx) => {
    await tx.delete(ConnectorSourceTombstoneTable).where(eq(ConnectorSourceTombstoneTable.connectorInstanceId, instance.id))
    await tx.delete(ConnectorSourceBindingTable).where(eq(ConnectorSourceBindingTable.connectorInstanceId, instance.id))
    await tx.delete(ConnectorSyncEventTable).where(eq(ConnectorSyncEventTable.connectorInstanceId, instance.id))

    if (configObjectIds.length > 0) {
      await tx.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.configObjectId, configObjectIds))
      await tx.delete(ConfigObjectAccessGrantTable).where(inArray(ConfigObjectAccessGrantTable.configObjectId, configObjectIds))
      await
tx.delete(ConfigObjectVersionTable).where(inArray(ConfigObjectVersionTable.configObjectId, configObjectIds))
      await tx.delete(ConfigObjectTable).where(inArray(ConfigObjectTable.id, configObjectIds))
    }

    if (mappingIds.length > 0) {
      await tx.delete(PluginConfigObjectTable).where(inArray(PluginConfigObjectTable.connectorMappingId, mappingIds))
      await tx.delete(ConnectorMappingTable).where(inArray(ConnectorMappingTable.id, mappingIds))
    }

    await tx.delete(ConnectorTargetTable).where(eq(ConnectorTargetTable.connectorInstanceId, instance.id))
    await tx.delete(ConnectorInstanceAccessGrantTable).where(eq(ConnectorInstanceAccessGrantTable.connectorInstanceId, instance.id))
    await tx.delete(ConnectorInstanceTable).where(eq(ConnectorInstanceTable.id, instance.id))

    // Cascade cleanup of connector-imported plugins/marketplaces left empty.
    await cleanupConnectorImportedResources({ seedPluginIds: pluginIds, tx })
  })

  return {
    deletedConfigObjectCount: configObjectIds.length,
    deletedConnectorMappingCount: mappingIds.length,
    removedConnectorInstanceId: instance.id,
  }
}

/**
 * Lists targets of a connector instance, newest-updated first, filtered in
 * memory by target kind and free-text query; results are cursor-paged.
 */
export async function listConnectorTargets(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext; cursor?: string; limit?: number; q?: string; targetKind?: ConnectorTargetRow["targetKind"] }) {
  await ensureVisibleConnectorInstance(input.context, input.connectorInstanceId)
  const rows = await db
    .select()
    .from(ConnectorTargetTable)
    .where(eq(ConnectorTargetTable.connectorInstanceId, input.connectorInstanceId))
    .orderBy(desc(ConnectorTargetTable.updatedAt), desc(ConnectorTargetTable.id))

  const filtered = rows
    .filter((row) => !input.targetKind || row.targetKind === input.targetKind)
    .filter((row) => !input.q || `${row.remoteId}\n${row.externalTargetRef ??
""}`.toLowerCase().includes(input.q.toLowerCase()))
    .map((row) => serializeConnectorTarget(row))

  return pageItems(filtered, input.cursor, input.limit)
}

/** Creates a target under an instance the caller can edit; remoteId is trimmed. */
export async function createConnectorTarget(input: { config: Record; connectorInstanceId: ConnectorInstanceId; connectorType: ConnectorTargetRow["connectorType"]; context: PluginArchActorContext; externalTargetRef?: string | null; remoteId: string; targetKind: ConnectorTargetRow["targetKind"] }) {
  await ensureEditableConnectorInstance(input.context, input.connectorInstanceId)
  const row = {
    connectorInstanceId: input.connectorInstanceId,
    connectorType: input.connectorType,
    createdAt: new Date(),
    externalTargetRef: normalizeOptionalString(input.externalTargetRef ?? undefined),
    id: createDenTypeId("connectorTarget"),
    organizationId: input.context.organizationContext.organization.id,
    remoteId: input.remoteId.trim(),
    targetConfigJson: input.config,
    targetKind: input.targetKind,
    updatedAt: new Date(),
  }
  await db.insert(ConnectorTargetTable).values(row)
  return serializeConnectorTarget(row)
}

/** Fetches one target; visibility is checked via its parent instance. */
export async function getConnectorTargetDetail(context: PluginArchActorContext, connectorTargetId: ConnectorTargetId) {
  const target = await getConnectorTargetRow(context.organizationContext.organization.id, connectorTargetId)
  if (!target) throw new PluginArchRouteFailure(404, "connector_target_not_found", "Connector target not found.")
  await ensureVisibleConnectorInstance(context, target.connectorInstanceId)
  return serializeConnectorTarget(target)
}

/** Patches mutable target fields; undefined inputs keep current values. */
export async function updateConnectorTarget(input: { config?: Record; connectorTargetId: ConnectorTargetId; context: PluginArchActorContext; externalTargetRef?: string | null; remoteId?: string }) {
  const target = await getConnectorTargetRow(input.context.organizationContext.organization.id, input.connectorTargetId)
  if (!target) throw new PluginArchRouteFailure(404, "connector_target_not_found", "Connector target not found.")
await ensureEditableConnectorInstance(input.context, target.connectorInstanceId)
  await db.update(ConnectorTargetTable).set({
    externalTargetRef: input.externalTargetRef === undefined ? target.externalTargetRef : normalizeOptionalString(input.externalTargetRef ?? undefined),
    remoteId: input.remoteId?.trim() || target.remoteId,
    targetConfigJson: input.config === undefined ? target.targetConfigJson : input.config,
    updatedAt: new Date(),
  }).where(eq(ConnectorTargetTable.id, target.id))
  return getConnectorTargetDetail(input.context, target.id)
}

/**
 * Queues a manual resync for a target by inserting a "queued" sync event;
 * records who requested it in summaryJson. Returns the new event id.
 */
export async function queueConnectorTargetResync(input: { connectorTargetId: ConnectorTargetId; context: PluginArchActorContext }) {
  const target = await getConnectorTargetRow(input.context.organizationContext.organization.id, input.connectorTargetId)
  if (!target) throw new PluginArchRouteFailure(404, "connector_target_not_found", "Connector target not found.")
  const instance = await ensureEditableConnectorInstance(input.context, target.connectorInstanceId)
  const eventId = createDenTypeId("connectorSyncEvent")
  await db.insert(ConnectorSyncEventTable).values({
    completedAt: null,
    connectorInstanceId: instance.id,
    connectorTargetId: target.id,
    connectorType: target.connectorType,
    eventType: "manual_resync",
    externalEventRef: null,
    id: eventId,
    organizationId: instance.organizationId,
    remoteId: target.remoteId,
    sourceRevisionRef: null,
    startedAt: new Date(),
    status: "queued",
    summaryJson: { queuedBy: input.context.organizationContext.currentMember.id },
  })
  return { id: eventId }
}

/**
 * Lists mappings of a target, filtered in memory by kind, object type,
 * plugin, and free-text query; results are cursor-paged.
 */
export async function listConnectorMappings(input: { connectorTargetId: ConnectorTargetId; context: PluginArchActorContext; cursor?: string; limit?: number; mappingKind?: ConnectorMappingRow["mappingKind"]; objectType?: ConnectorMappingRow["objectType"]; pluginId?: PluginId; q?: string }) {
  const target = await getConnectorTargetRow(input.context.organizationContext.organization.id, input.connectorTargetId)
  if (!target) throw new PluginArchRouteFailure(404, "connector_target_not_found", "Connector target not found.")
  await ensureVisibleConnectorInstance(input.context, target.connectorInstanceId)
  const rows = await db.select().from(ConnectorMappingTable).where(eq(ConnectorMappingTable.connectorTargetId, target.id)).orderBy(desc(ConnectorMappingTable.updatedAt), desc(ConnectorMappingTable.id))
  const filtered = rows
    .filter((row) => !input.mappingKind || row.mappingKind === input.mappingKind)
    .filter((row) => !input.objectType || row.objectType === input.objectType)
    .filter((row) => !input.pluginId || row.pluginId === input.pluginId)
    .filter((row) => !input.q || `${row.selector}\n${row.remoteId ?? ""}`.toLowerCase().includes(input.q.toLowerCase()))
    .map((row) => serializeConnectorMapping(row))
  return pageItems(filtered, input.cursor, input.limit)
}

/**
 * Creates a mapping on a target. When a pluginId is supplied the caller must
 * be able to edit that plugin as well as the parent connector instance.
 */
export async function createConnectorMapping(input: { autoAddToPlugin: boolean; config?: Record; connectorTargetId: ConnectorTargetId; context: PluginArchActorContext; mappingKind: ConnectorMappingRow["mappingKind"]; objectType: ConnectorMappingRow["objectType"]; pluginId?: PluginId | null; selector: string }) {
  const target = await getConnectorTargetRow(input.context.organizationContext.organization.id, input.connectorTargetId)
  if (!target) throw new PluginArchRouteFailure(404, "connector_target_not_found", "Connector target not found.")
  await ensureEditableConnectorInstance(input.context, target.connectorInstanceId)
  if (input.pluginId) {
    await ensureEditablePlugin(input.context, input.pluginId)
  }
  const row = {
    autoAddToPlugin: input.autoAddToPlugin,
    connectorInstanceId: target.connectorInstanceId,
    connectorTargetId: target.id,
    connectorType: target.connectorType,
    createdAt: new Date(),
    id: createDenTypeId("connectorMapping"),
    mappingConfigJson: input.config ?? null,
    mappingKind: input.mappingKind,
    objectType: input.objectType,
    organizationId: input.context.organizationContext.organization.id,
    pluginId: input.pluginId ?? null,
    remoteId: null,
    selector: input.selector.trim(),
    updatedAt: new Date(),
  }
  await db.insert(ConnectorMappingTable).values(row)
  return serializeConnectorMapping(row)
}

/**
 * Patches a mapping; undefined inputs keep current values. (Definition
 * continues past this chunk.)
 */
export async function updateConnectorMapping(input: { autoAddToPlugin?: boolean; config?: Record; connectorMappingId: ConnectorMappingId; context: PluginArchActorContext; objectType?: ConnectorMappingRow["objectType"]; pluginId?: PluginId | null; selector?: string }) {
  const mapping = await getConnectorMappingRow(input.context.organizationContext.organization.id, input.connectorMappingId)
  if (!mapping) throw new PluginArchRouteFailure(404, "connector_mapping_not_found", "Connector mapping not found.")
  await ensureEditableConnectorInstance(input.context, mapping.connectorInstanceId)
  if (input.pluginId) {
    await ensureEditablePlugin(input.context, input.pluginId)
  }
  await db.update(ConnectorMappingTable).set({
    autoAddToPlugin: input.autoAddToPlugin ?? mapping.autoAddToPlugin,
    mappingConfigJson: input.config === undefined ? mapping.mappingConfigJson : input.config,
    objectType: input.objectType ?? mapping.objectType,
    pluginId: input.pluginId === undefined ? mapping.pluginId : input.pluginId,
    selector: input.selector?.trim() || mapping.selector,
    updatedAt: new Date(),
  }).where(eq(ConnectorMappingTable.id, mapping.id))
  return serializeConnectorMapping({ ...mapping, autoAddToPlugin: input.autoAddToPlugin ?? mapping.autoAddToPlugin, mappingConfigJson: input.config === undefined ? mapping.mappingConfigJson : input.config, objectType: input.objectType ?? mapping.objectType, pluginId: input.pluginId === undefined ?
mapping.pluginId : input.pluginId, selector: input.selector?.trim() || mapping.selector, updatedAt: new Date() }) +} + +export async function deleteConnectorMapping(input: { connectorMappingId: ConnectorMappingId; context: PluginArchActorContext }) { + const mapping = await getConnectorMappingRow(input.context.organizationContext.organization.id, input.connectorMappingId) + if (!mapping) throw new PluginArchRouteFailure(404, "connector_mapping_not_found", "Connector mapping not found.") + await ensureEditableConnectorInstance(input.context, mapping.connectorInstanceId) + await db.delete(ConnectorMappingTable).where(eq(ConnectorMappingTable.id, mapping.id)) +} + +export async function listConnectorSyncEvents(input: { connectorInstanceId?: ConnectorInstanceId; connectorTargetId?: ConnectorTargetId; context: PluginArchActorContext; cursor?: string; eventType?: ConnectorSyncEventRow["eventType"]; limit?: number; q?: string; status?: ConnectorSyncEventRow["status"] }) { + const rows = await db + .select({ event: ConnectorSyncEventTable, instance: ConnectorInstanceTable }) + .from(ConnectorSyncEventTable) + .innerJoin(ConnectorInstanceTable, eq(ConnectorSyncEventTable.connectorInstanceId, ConnectorInstanceTable.id)) + .where(eq(ConnectorInstanceTable.organizationId, input.context.organizationContext.organization.id)) + .orderBy(desc(ConnectorSyncEventTable.startedAt), desc(ConnectorSyncEventTable.id)) + + const filtered: ReturnType[] = [] + for (const row of rows) { + const role = await resolvePluginArchResourceRole({ context: input.context, resourceId: row.instance.id, resourceKind: "connector_instance" }) + if (!role) continue + if (input.connectorInstanceId && row.event.connectorInstanceId !== input.connectorInstanceId) continue + if (input.connectorTargetId && row.event.connectorTargetId !== input.connectorTargetId) continue + if (input.eventType && row.event.eventType !== input.eventType) continue + if (input.status && row.event.status !== input.status) continue + 
if (input.q && !`${row.event.externalEventRef ?? ""}\n${row.event.sourceRevisionRef ?? ""}`.toLowerCase().includes(input.q.toLowerCase())) continue + filtered.push(serializeConnectorSyncEvent(row.event)) + } + return pageItems(filtered, input.cursor, input.limit) +} + +export async function getConnectorSyncEventDetail(context: PluginArchActorContext, connectorSyncEventId: ConnectorSyncEventId) { + const row = await getConnectorSyncEventRow(context.organizationContext.organization.id, connectorSyncEventId) + if (!row) throw new PluginArchRouteFailure(404, "connector_sync_event_not_found", "Connector sync event not found.") + await ensureVisibleConnectorInstance(context, row.connectorInstanceId) + return serializeConnectorSyncEvent(row) +} + +export async function retryConnectorSyncEvent(input: { connectorSyncEventId: ConnectorSyncEventId; context: PluginArchActorContext }) { + const row = await getConnectorSyncEventRow(input.context.organizationContext.organization.id, input.connectorSyncEventId) + if (!row) throw new PluginArchRouteFailure(404, "connector_sync_event_not_found", "Connector sync event not found.") + await ensureEditableConnectorInstance(input.context, row.connectorInstanceId) + await db.update(ConnectorSyncEventTable).set({ completedAt: null, startedAt: new Date(), status: "queued" }).where(eq(ConnectorSyncEventTable.id, row.id)) + return { id: row.id } +} + +function githubConnectorAppConfig() { + try { + return getGithubConnectorAppConfig(env.githubConnectorApp) + } catch (error) { + if (error instanceof GithubConnectorConfigError) { + throw new PluginArchRouteFailure(409, "github_connector_app_not_configured", error.message) + } + throw error + } +} + +export function consumeGithubInstallState(state: string) { + const parsed = verifyGithubInstallStateToken({ secret: env.betterAuthSecret, token: state }) + if (!parsed) { + throw new PluginArchRouteFailure(400, "invalid_github_install_state", "GitHub install state is invalid or expired.") + } + 
return parsed +} + +function wrapGithubConnectorError(error: unknown): never { + if (error instanceof PluginArchRouteFailure) { + throw error + } + + if (error instanceof GithubConnectorConfigError) { + throw new PluginArchRouteFailure(409, "github_connector_app_not_configured", error.message) + } + + if (error instanceof GithubConnectorRequestError) { + throw new PluginArchRouteFailure(409, "github_connector_request_failed", error.message) + } + + throw error +} + +function normalizeDiscoveryCursor(value: string | undefined) { + return value?.trim() || undefined +} + +function discoveryStep(status: GithubConnectorDiscoveryStep["status"], id: GithubConnectorDiscoveryStep["id"], label: string): GithubConnectorDiscoveryStep { + return { id, label, status } +} + +function buildGithubConnectorDiscoverySteps(input: { + classification: GithubDiscoveryClassification + discoveredPlugins: GithubDiscoveredPlugin[] +}) { + return [ + discoveryStep("completed", "read_repository_structure", "Read repository structure"), + discoveryStep(input.classification === "claude_marketplace_repo" ? "completed" : "warning", "check_marketplace_manifest", "Check for Claude marketplace manifest"), + discoveryStep( + input.classification === "claude_single_plugin_repo" || input.classification === "claude_multi_plugin_repo" + ? "completed" + : "warning", + "check_plugin_manifests", + "Check for plugin manifests", + ), + discoveryStep(input.discoveredPlugins.length > 0 ? 
"completed" : "warning", "prepare_discovered_plugins", "Prepare discovered plugins"), + ] satisfies GithubConnectorDiscoveryStep[] +} + +function buildGithubDiscoveryImportPlans(input: { discoveredPlugins: GithubDiscoveredPlugin[]; treeEntries: GithubDiscoveryTreeEntry[] }) { + return Object.fromEntries(input.discoveredPlugins.map((plugin) => [ + plugin.key, + discoveryMappingsForPlugin(plugin).map((mapping) => ({ + objectType: mapping.objectType, + paths: importableGithubPathsForMapping({ mapping, treeEntries: input.treeEntries }).map((entry) => entry.path), + selector: mapping.selector, + } satisfies GithubDiscoveryImportPlan)), + ])) satisfies Record +} + +function readGithubDiscoveryCache(config: Record | null) { + const cache = config && isRecord(config.githubDiscoveryCache) ? config.githubDiscoveryCache : null + if (!cache) { + return null + } + + const repositoryFullName = typeof cache.repositoryFullName === "string" ? cache.repositoryFullName : null + const branch = typeof cache.branch === "string" ? cache.branch : null + const ref = typeof cache.ref === "string" ? cache.ref : null + const sourceRevisionRef = typeof cache.sourceRevisionRef === "string" ? cache.sourceRevisionRef : null + const discoveredPlugins = Array.isArray(cache.discoveredPlugins) ? cache.discoveredPlugins as GithubDiscoveredPlugin[] : null + const warnings = Array.isArray(cache.warnings) ? cache.warnings.filter((entry): entry is string => typeof entry === "string") : null + const treeSummary = isRecord(cache.treeSummary) ? cache.treeSummary as GithubConnectorDiscoveryTreeSummary : null + const importPlansByPluginKey = isRecord(cache.importPlansByPluginKey) + ? cache.importPlansByPluginKey as Record + : null + const classification = typeof cache.classification === "string" ? 
cache.classification as GithubDiscoveryClassification : null + + if (!repositoryFullName || !branch || !ref || !sourceRevisionRef || !discoveredPlugins || !warnings || !treeSummary || !importPlansByPluginKey || !classification) { + return null + } + + return { + branch, + classification, + discoveredPlugins, + importPlansByPluginKey, + marketplace: isRecord(cache.marketplace) || cache.marketplace === null ? cache.marketplace as GithubMarketplaceInfo | null : null, + ref, + repositoryFullName, + sourceRevisionRef, + treeSummary, + warnings, + } satisfies GithubDiscoveryCacheEntry +} + +function withGithubDiscoveryCache(config: Record, cache: GithubDiscoveryCacheEntry) { + return { + ...config, + githubDiscoveryCache: cache, + } +} + +async function getGithubDiscoveryContext(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) { + const connectorInstance = await ensureVisibleConnectorInstance(input.context, input.connectorInstanceId) + if (connectorInstance.connectorType !== "github") { + throw new PluginArchRouteFailure(409, "github_connector_instance_required", "Connector instance is not a GitHub connector.") + } + + const connectorAccount = await getConnectorAccountRow(input.context.organizationContext.organization.id, connectorInstance.connectorAccountId) + if (!connectorAccount || connectorAccount.connectorType !== "github") { + throw new PluginArchRouteFailure(404, "connector_account_not_found", "GitHub connector account not found.") + } + + const targetRows = await db + .select() + .from(ConnectorTargetTable) + .where(eq(ConnectorTargetTable.connectorInstanceId, connectorInstance.id)) + .orderBy(asc(ConnectorTargetTable.createdAt), asc(ConnectorTargetTable.id)) + .limit(1) + const connectorTarget = targetRows[0] ?? 
null + if (!connectorTarget) { + throw new PluginArchRouteFailure(404, "connector_target_not_found", "GitHub connector target not found.") + } + + const targetConfig = connectorTarget.targetConfigJson && typeof connectorTarget.targetConfigJson === "object" + ? connectorTarget.targetConfigJson as Record + : {} + const repositoryFullName = typeof targetConfig.repositoryFullName === "string" ? targetConfig.repositoryFullName.trim() : connectorTarget.remoteId.trim() + const branch = typeof targetConfig.branch === "string" ? targetConfig.branch.trim() : connectorTarget.externalTargetRef?.trim() ?? "" + const ref = typeof targetConfig.ref === "string" ? targetConfig.ref.trim() : branch ? `refs/heads/${branch}` : "" + const installationId = typeof connectorInstance.instanceConfigJson === "object" && connectorInstance.instanceConfigJson && typeof (connectorInstance.instanceConfigJson as Record).installationId === "number" + ? (connectorInstance.instanceConfigJson as Record).installationId as number + : Number(connectorAccount.remoteId) + + if (!repositoryFullName || !branch || !ref || !Number.isFinite(installationId) || installationId <= 0) { + throw new PluginArchRouteFailure(409, "invalid_github_connector_target", "GitHub connector target is missing repository, branch, or installation metadata.") + } + + const instanceConfigRecord = typeof connectorInstance.instanceConfigJson === "object" && connectorInstance.instanceConfigJson + ? connectorInstance.instanceConfigJson as Record + : null + const autoImportSaved = instanceConfigRecord ? instanceConfigRecord.autoImportNewPlugins : undefined + return { + autoImportNewPlugins: typeof autoImportSaved === "boolean" ? 
autoImportSaved : true, + branch, + connectorAccount, + connectorInstance, + connectorTarget, + installationId, + ref, + repositoryFullName, + } +} + +async function buildConnectorAutomationContext(input: { connectorInstance: ConnectorInstanceRow }) { + const organizationRows = await db + .select() + .from(OrganizationTable) + .where(eq(OrganizationTable.id, input.connectorInstance.organizationId)) + .limit(1) + const organization = organizationRows[0] as OrganizationRow | undefined + if (!organization) { + throw new PluginArchRouteFailure(404, "organization_not_found", "Organization not found for connector instance.") + } + + const memberRows = await db + .select() + .from(MemberTable) + .where(and( + eq(MemberTable.organizationId, input.connectorInstance.organizationId), + eq(MemberTable.id, input.connectorInstance.createdByOrgMembershipId), + )) + .limit(1) + const member = memberRows[0] as MemberRow | undefined + if (!member) { + throw new PluginArchRouteFailure(404, "member_not_found", "Connector creator member not found.") + } + + return { + memberTeams: [], + organizationContext: { + currentMember: { + createdAt: member.createdAt, + id: member.id, + isOwner: roleIncludesOwner(member.role), + role: member.role, + userId: member.userId, + }, + invitations: [], + members: [], + organization: { + allowedEmailDomains: organization.allowedEmailDomains ?? null, + createdAt: organization.createdAt, + desktopAppRestrictions: organization.desktopAppRestrictions, + id: organization.id, + logo: organization.logo ?? null, + metadata: organization.metadata ? 
JSON.stringify(organization.metadata) : null, + name: organization.name, + slug: organization.slug, + updatedAt: organization.updatedAt, + }, + roles: [], + teams: [], + }, + } satisfies PluginArchActorContext +} + +async function maybeAutoImportGithubConnectorInstance(input: { + connectorInstance: ConnectorInstanceRow + connectorTarget: ConnectorTargetRow +}) { + const instanceConfig = input.connectorInstance.instanceConfigJson && typeof input.connectorInstance.instanceConfigJson === "object" + ? input.connectorInstance.instanceConfigJson as Record + : {} + if (instanceConfig.autoImportNewPlugins !== true) { + return { autoImported: false as const, createdPluginCount: 0, materializedConfigObjectCount: 0 } + } + + const context = await buildConnectorAutomationContext({ connectorInstance: input.connectorInstance }) + const discovery = await resolveGithubConnectorDiscovery({ + connectorInstanceId: input.connectorInstance.id, + context, + }) + const selectedKeys = discovery.cache.discoveredPlugins + .filter((plugin) => plugin.supported) + .map((plugin) => plugin.key) + + const applied = await applyGithubConnectorDiscovery({ + autoImportNewPlugins: true, + connectorInstanceId: input.connectorInstance.id, + context, + selectedKeys, + }) + + return { + autoImported: true as const, + createdPluginCount: applied.createdPlugins.length, + materializedConfigObjectCount: applied.materializedConfigObjects.length, + } +} + +async function getGithubDiscoveryFileTexts(input: { + branch: string + config: ReturnType + installationId: number + repositoryFullName: string + token?: string + treeEntries: GithubDiscoveryTreeEntry[] +}) { + const interestingPaths = new Set() + const knownPaths = new Set(input.treeEntries.map((entry) => entry.path)) + + if (knownPaths.has(".claude-plugin/marketplace.json")) { + interestingPaths.add(".claude-plugin/marketplace.json") + } + + for (const entry of input.treeEntries) { + if (entry.path.endsWith(".claude-plugin/plugin.json") || 
entry.path.endsWith("/plugin.json") || entry.path === "plugin.json") { + interestingPaths.add(entry.path) + } + } + + const fileTextByPath: Record = {} + for (const path of interestingPaths) { + try { + fileTextByPath[path] = await getGithubRepositoryTextFile({ + config: input.config, + installationId: input.installationId, + path, + ref: input.branch, + repositoryFullName: input.repositoryFullName, + token: input.token, + }) + } catch (error) { + wrapGithubConnectorError(error) + } + } + + return fileTextByPath +} + +function pagedGithubDiscoveryTree(input: { cursor?: string; entries: GithubDiscoveryTreeEntry[]; limit?: number; prefix?: string }) { + const normalizedPrefix = input.prefix?.trim().replace(/^\/+/, "").replace(/\/+$/, "") + const filtered = input.entries + .filter((entry) => !normalizedPrefix || entry.path === normalizedPrefix || entry.path.startsWith(`${normalizedPrefix}/`)) + .sort((left, right) => left.path.localeCompare(right.path)) + return pageItems(filtered, normalizeDiscoveryCursor(input.cursor), input.limit) +} + +async function computeGithubDiscoverySnapshot(input: { + branch: string + installationId: number + ref: string + repositoryFullName: string + token?: string +}) { + const token = input.token ?? 
await getGithubInstallationAccessToken({ + config: githubConnectorAppConfig(), + installationId: input.installationId, + }) + let treeSnapshot: Awaited> + try { + treeSnapshot = await getGithubRepositoryTree({ + branch: input.branch, + config: githubConnectorAppConfig(), + installationId: input.installationId, + repositoryFullName: input.repositoryFullName, + token, + }) + } catch (error) { + wrapGithubConnectorError(error) + } + + const fileTextByPath = await getGithubDiscoveryFileTexts({ + branch: input.branch, + config: githubConnectorAppConfig(), + installationId: input.installationId, + repositoryFullName: input.repositoryFullName, + token, + treeEntries: treeSnapshot.treeEntries, + }) + const discovery = buildGithubRepoDiscovery({ + entries: treeSnapshot.treeEntries, + fileTextByPath, + }) + + return { + branch: input.branch, + classification: discovery.classification, + discoveredPlugins: discovery.discoveredPlugins, + importPlansByPluginKey: buildGithubDiscoveryImportPlans({ + discoveredPlugins: discovery.discoveredPlugins, + treeEntries: treeSnapshot.treeEntries, + }), + marketplace: discovery.marketplace, + ref: input.ref, + repositoryFullName: input.repositoryFullName, + sourceRevisionRef: treeSnapshot.headSha, + treeEntries: treeSnapshot.treeEntries, + treeSummary: { + scannedEntryCount: treeSnapshot.treeEntries.length, + strategy: "git-tree-recursive", + truncated: treeSnapshot.truncated, + } satisfies GithubConnectorDiscoveryTreeSummary, + warnings: discovery.warnings, + } satisfies GithubDiscoverySnapshot +} + +async function computeGithubConnectorDiscovery(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext; token?: string }) { + const discoveryContext = await getGithubDiscoveryContext(input) + const snapshot = await computeGithubDiscoverySnapshot({ + branch: discoveryContext.branch, + installationId: discoveryContext.installationId, + ref: discoveryContext.ref, + repositoryFullName: 
discoveryContext.repositoryFullName, + token: input.token, + }) + + return { + ...snapshot, + connectorInstance: serializeConnectorInstance(discoveryContext.connectorInstance), + connectorTarget: serializeConnectorTarget(discoveryContext.connectorTarget), + } satisfies GithubConnectorDiscoveryComputation +} + +async function persistGithubConnectorDiscoveryCache(input: { + cache: GithubDiscoveryCacheEntry + connectorTargetId: ConnectorTargetId + context: PluginArchActorContext +}) { + const target = await getConnectorTargetRow(input.context.organizationContext.organization.id, input.connectorTargetId) + if (!target) { + return + } + + const targetConfig = target.targetConfigJson && typeof target.targetConfigJson === "object" + ? target.targetConfigJson as Record + : {} + await updateConnectorTarget({ + config: withGithubDiscoveryCache(targetConfig, input.cache), + connectorTargetId: target.id, + context: input.context, + externalTargetRef: target.externalTargetRef, + remoteId: target.remoteId, + }) +} + +async function resolveGithubConnectorDiscovery(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) { + const discoveryContext = await getGithubDiscoveryContext(input) + const targetConfig = discoveryContext.connectorTarget.targetConfigJson && typeof discoveryContext.connectorTarget.targetConfigJson === "object" + ? 
discoveryContext.connectorTarget.targetConfigJson as Record + : null + const cached = readGithubDiscoveryCache(targetConfig) + if (cached + && cached.branch === discoveryContext.branch + && cached.ref === discoveryContext.ref + && cached.repositoryFullName === discoveryContext.repositoryFullName) { + return { + autoImportNewPlugins: discoveryContext.autoImportNewPlugins, + cache: cached, + connectorInstance: serializeConnectorInstance(discoveryContext.connectorInstance), + connectorTarget: serializeConnectorTarget(discoveryContext.connectorTarget), + } + } + + const computed = await computeGithubConnectorDiscovery(input) + const cache = { + branch: computed.branch, + classification: computed.classification, + discoveredPlugins: computed.discoveredPlugins, + importPlansByPluginKey: computed.importPlansByPluginKey, + marketplace: computed.marketplace, + ref: computed.ref, + repositoryFullName: computed.repositoryFullName, + sourceRevisionRef: computed.sourceRevisionRef, + treeSummary: computed.treeSummary, + warnings: computed.warnings, + } satisfies GithubDiscoveryCacheEntry + await persistGithubConnectorDiscoveryCache({ + cache, + connectorTargetId: computed.connectorTarget.id, + context: input.context, + }) + return { + autoImportNewPlugins: discoveryContext.autoImportNewPlugins, + cache, + connectorInstance: computed.connectorInstance, + connectorTarget: computed.connectorTarget, + } +} + +function discoveryMappingsForPlugin(plugin: GithubDiscoveredPlugin) { + return [ + ...plugin.componentPaths.skills.map((selector) => ({ objectType: "skill" as const, selector: `${selector}/**` })), + ...plugin.componentPaths.commands.map((selector) => ({ objectType: "command" as const, selector: `${selector}/**` })), + ...plugin.componentPaths.agents.map((selector) => ({ objectType: "agent" as const, selector: `${selector}/**` })), + ...plugin.componentPaths.hooks.map((selector) => ({ objectType: "hook" as const, selector })), + 
...plugin.componentPaths.mcpServers.map((selector) => ({ objectType: "mcp" as const, selector })), + ] +} + +function mappingSelectorMatchesPath(selector: string, path: string) { + const normalizedSelector = selector.trim().replace(/^\/+/, "") + const normalizedPath = path.trim().replace(/^\/+/, "") + if (normalizedSelector.endsWith("/**")) { + const prefix = normalizedSelector.slice(0, -3) + return normalizedPath.startsWith(`${prefix}/`) + } + return normalizedPath === normalizedSelector +} + +function importableGithubPathsForMapping(input: { + mapping: Pick, "objectType" | "selector"> + treeEntries: GithubDiscoveryTreeEntry[] +}) { + const matchingBlobs = input.treeEntries + .filter((entry) => entry.kind === "blob") + .filter((entry) => mappingSelectorMatchesPath(input.mapping.selector, entry.path)) + + if (input.mapping.objectType === "skill") { + const preferred = matchingBlobs.filter((entry) => entry.path.endsWith("/SKILL.md")) + return preferred.length > 0 ? preferred : matchingBlobs.filter((entry) => entry.path.endsWith(".md")) + } + if (input.mapping.objectType === "agent") { + const preferred = matchingBlobs.filter((entry) => entry.path.endsWith("/AGENT.md")) + return preferred.length > 0 ? 
preferred : matchingBlobs.filter((entry) => entry.path.endsWith(".md")) + } + if (input.mapping.objectType === "command") { + return matchingBlobs.filter((entry) => entry.path.endsWith(".md")) + } + return matchingBlobs +} + +function parseMarkdownFrontmatter(rawSourceText: string): { body: string; data: Record } { + const match = rawSourceText.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/) + if (!match) { + return { body: rawSourceText, data: {} } + } + + const [, yaml, body] = match + const data: Record = {} + for (const line of yaml.split(/\r?\n/)) { + const trimmed = line.trim() + if (!trimmed || trimmed.startsWith("#")) continue + const colonIndex = trimmed.indexOf(":") + if (colonIndex === -1) continue + const key = trimmed.slice(0, colonIndex).trim() + let value = trimmed.slice(colonIndex + 1).trim() + if (value.length > 1) { + const first = value[0] + const last = value[value.length - 1] + if ((first === '"' && last === '"') || (first === "'" && last === "'")) { + value = value.slice(1, -1) + } + } + if (!key || !value) continue + data[key] = value + } + return { body: body ?? "", data } +} + +function importedObjectMetadata(input: { objectType: ConnectorMappingRow["objectType"]; path: string; rawSourceText: string }) { + const pathSegments = input.path.split("/") + const fileName = pathSegments[pathSegments.length - 1] ?? input.path + const parentName = pathSegments[pathSegments.length - 2] ?? pathSegments[pathSegments.length - 1] ?? "Imported" + const nameFromFile = fileName.replace(/\.[^.]+$/, "") + const preferredName = input.objectType === "skill" || input.objectType === "agent" + ? (fileName.toUpperCase() === "SKILL.MD" || fileName.toUpperCase() === "AGENT.MD" ? parentName : nameFromFile) + : nameFromFile + + const isMarkdown = fileName.toLowerCase().endsWith(".md") || fileName.toLowerCase().endsWith(".mdx") + const frontmatter = isMarkdown ? parseMarkdownFrontmatter(input.rawSourceText) : null + const frontmatterName = frontmatter?.data.name ?? 
frontmatter?.data.title + const frontmatterDescription = frontmatter?.data.description ?? frontmatter?.data.summary + + const metadata: Record = { + name: frontmatterName?.trim() || preferredName, + relativePath: input.path, + } + if (frontmatterDescription?.trim()) { + metadata.description = frontmatterDescription.trim() + } + if (frontmatter && Object.keys(frontmatter.data).length > 0) { + metadata.frontmatter = frontmatter.data + } + + return { + metadata, + normalizedPayloadJson: (() => { + if (!fileName.endsWith(".json")) { + return undefined + } + try { + const parsed = JSON.parse(input.rawSourceText) as unknown + return typeof parsed === "object" && parsed !== null && !Array.isArray(parsed) ? parsed as Record : undefined + } catch { + return undefined + } + })(), + } +} + +async function materializeGithubImportedObject(input: { + connectorInstance: ReturnType + connectorMapping: ReturnType + connectorTarget: ReturnType + context: PluginArchActorContext + externalLocator: string + rawSourceText: string + sourceRevisionRef: string +}) { + const organizationId = input.context.organizationContext.organization.id + const createdByOrgMembershipId = input.context.organizationContext.currentMember.id + const now = new Date() + const metadata = importedObjectMetadata({ + objectType: input.connectorMapping.objectType, + path: input.externalLocator, + rawSourceText: input.rawSourceText, + }) + const frontmatterRecord = metadata.metadata && typeof metadata.metadata.frontmatter === "object" + ? metadata.metadata.frontmatter as Record + : null + const hasFrontmatter = frontmatterRecord && Object.keys(frontmatterRecord).length > 0 + const projectionRawSource = hasFrontmatter + ? 
parseMarkdownFrontmatter(input.rawSourceText).body + : input.rawSourceText + const projection = deriveProjection({ + objectType: input.connectorMapping.objectType, + value: { + metadata: metadata.metadata, + normalizedPayloadJson: metadata.normalizedPayloadJson, + rawSourceText: projectionRawSource, + }, + }) + const fileName = input.externalLocator.split("/").filter(Boolean).at(-1) ?? input.externalLocator + const fileExtension = fileName.includes(".") ? fileName.split(".").at(-1) ?? null : null + + const existingBinding = await db + .select() + .from(ConnectorSourceBindingTable) + .where(and( + eq(ConnectorSourceBindingTable.organizationId, organizationId), + eq(ConnectorSourceBindingTable.connectorMappingId, input.connectorMapping.id), + eq(ConnectorSourceBindingTable.externalLocator, input.externalLocator), + isNull(ConnectorSourceBindingTable.deletedAt), + )) + .limit(1) + + if (!existingBinding[0]) { + const configObjectId = createDenTypeId("configObject") + const versionId = createDenTypeId("configObjectVersion") + await db.transaction(async (tx) => { + await tx.insert(ConfigObjectTable).values({ + connectorInstanceId: input.connectorInstance.id, + createdAt: now, + createdByOrgMembershipId, + currentFileExtension: normalizeOptionalString(fileExtension ?? undefined), + currentFileName: fileName, + currentRelativePath: input.externalLocator, + deletedAt: null, + description: projection.description, + id: configObjectId, + objectType: input.connectorMapping.objectType, + organizationId, + searchText: projection.searchText, + sourceMode: "connector", + status: "active", + title: projection.title, + updatedAt: now, + }) + + await tx.insert(ConfigObjectVersionTable).values({ + configObjectId, + connectorSyncEventId: null, + createdAt: now, + createdByOrgMembershipId, + createdVia: "connector", + id: versionId, + isDeletedVersion: false, + normalizedPayloadJson: metadata.normalizedPayloadJson ?? 
null, + organizationId, + rawSourceText: normalizeOptionalString(input.rawSourceText), + schemaVersion: null, + sourceRevisionRef: input.sourceRevisionRef, + }) + + await tx.insert(ConfigObjectAccessGrantTable).values({ + configObjectId, + createdAt: now, + createdByOrgMembershipId, + id: createDenTypeId("configObjectAccessGrant"), + organizationId, + orgMembershipId: createdByOrgMembershipId, + orgWide: false, + role: "manager", + teamId: null, + }) + + if (input.connectorMapping.pluginId) { + await tx.insert(PluginConfigObjectTable).values({ + configObjectId, + connectorMappingId: input.connectorMapping.id, + createdAt: now, + createdByOrgMembershipId, + id: createDenTypeId("pluginConfigObject"), + membershipSource: "connector", + organizationId, + pluginId: input.connectorMapping.pluginId, + removedAt: null, + }) + } + + await tx.insert(ConnectorSourceBindingTable).values({ + configObjectId, + connectorInstanceId: input.connectorInstance.id, + connectorMappingId: input.connectorMapping.id, + connectorTargetId: input.connectorTarget.id, + connectorType: input.connectorTarget.connectorType, + createdAt: now, + deletedAt: null, + externalLocator: input.externalLocator, + externalStableRef: input.externalLocator, + id: createDenTypeId("connectorSourceBinding"), + lastSeenSourceRevisionRef: input.sourceRevisionRef, + organizationId, + remoteId: input.connectorTarget.remoteId, + status: "active", + updatedAt: now, + }) + }) + + return getConfigObjectDetail(input.context, configObjectId) + } + + const binding = existingBinding[0] + if (binding.lastSeenSourceRevisionRef !== input.sourceRevisionRef) { + const versionId = createDenTypeId("configObjectVersion") + await db.transaction(async (tx) => { + await tx.update(ConfigObjectTable).set({ + currentFileExtension: normalizeOptionalString(fileExtension ?? 
undefined), + currentFileName: fileName, + currentRelativePath: input.externalLocator, + description: projection.description, + searchText: projection.searchText, + status: "active", + title: projection.title, + updatedAt: now, + }).where(eq(ConfigObjectTable.id, binding.configObjectId)) + + await tx.insert(ConfigObjectVersionTable).values({ + configObjectId: binding.configObjectId, + connectorSyncEventId: null, + createdAt: now, + createdByOrgMembershipId, + createdVia: "connector", + id: versionId, + isDeletedVersion: false, + normalizedPayloadJson: metadata.normalizedPayloadJson ?? null, + organizationId, + rawSourceText: normalizeOptionalString(input.rawSourceText), + schemaVersion: null, + sourceRevisionRef: input.sourceRevisionRef, + }) + + if (input.connectorMapping.pluginId) { + const membership = await tx + .select({ id: PluginConfigObjectTable.id }) + .from(PluginConfigObjectTable) + .where(and( + eq(PluginConfigObjectTable.pluginId, input.connectorMapping.pluginId), + eq(PluginConfigObjectTable.configObjectId, binding.configObjectId), + )) + .limit(1) + if (membership[0]) { + await tx.update(PluginConfigObjectTable).set({ + connectorMappingId: input.connectorMapping.id, + membershipSource: "connector", + removedAt: null, + }).where(eq(PluginConfigObjectTable.id, membership[0].id)) + } else { + await tx.insert(PluginConfigObjectTable).values({ + configObjectId: binding.configObjectId, + connectorMappingId: input.connectorMapping.id, + createdAt: now, + createdByOrgMembershipId, + id: createDenTypeId("pluginConfigObject"), + membershipSource: "connector", + organizationId, + pluginId: input.connectorMapping.pluginId, + removedAt: null, + }) + } + } + + await tx.update(ConnectorSourceBindingTable).set({ + deletedAt: null, + lastSeenSourceRevisionRef: input.sourceRevisionRef, + status: "active", + updatedAt: now, + }).where(eq(ConnectorSourceBindingTable.id, binding.id)) + }) + } + + return getConfigObjectDetail(input.context, binding.configObjectId) +} + 
// NOTE(review): generic type arguments appear to have been stripped from this
// extract (bare `ReturnType`, `Record`, `Awaited>`). Restore them from the
// original source before compiling; tokens below are otherwise preserved.

/**
 * Fetches each planned file from the GitHub repository (at the target branch)
 * and materializes it into a config object via materializeGithubImportedObject.
 * Files that come back empty/null are skipped silently.
 * Throws PluginArchRouteFailure(409) when branch/installation/repository
 * context cannot be derived from the connector instance/target config.
 */
async function materializeGithubImportPlans(input: {
  connectorInstance: ReturnType
  connectorTarget: ReturnType
  context: PluginArchActorContext
  importPlans: Array<{ mapping: ReturnType; paths: string[] }>
  sourceRevisionRef: string
}) {
  const config = githubConnectorAppConfig()
  // Target config JSON is untyped; fall back to an empty object when absent.
  const targetConfig = input.connectorTarget.targetConfigJson && typeof input.connectorTarget.targetConfigJson === "object"
    ? input.connectorTarget.targetConfigJson as Record
    : {}
  const branch = typeof targetConfig.branch === "string" ? targetConfig.branch : input.connectorTarget.externalTargetRef ?? ""
  const installationId = typeof input.connectorInstance.instanceConfigJson === "object" && input.connectorInstance.instanceConfigJson && typeof (input.connectorInstance.instanceConfigJson as Record).installationId === "number"
    ? (input.connectorInstance.instanceConfigJson as Record).installationId as number
    : null
  const repositoryFullName = typeof targetConfig.repositoryFullName === "string" ? targetConfig.repositoryFullName : input.connectorTarget.remoteId
  if (!installationId || !branch || !repositoryFullName) {
    throw new PluginArchRouteFailure(409, "invalid_github_materialization_context", "GitHub connector target is missing required materialization context.")
  }

  // One installation token is reused for every file download below.
  const token = await getGithubInstallationAccessToken({
    config,
    installationId,
  })
  const materializedConfigObjects: ReturnType[] = []
  for (const plan of input.importPlans) {
    for (const path of plan.paths) {
      let rawSourceText: string | null
      try {
        rawSourceText = await getGithubRepositoryTextFile({
          config,
          installationId,
          path,
          ref: branch,
          repositoryFullName,
          token,
        })
      } catch (error) {
        // NOTE(review): assumes wrapGithubConnectorError rethrows (returns
        // never); otherwise rawSourceText would be used unassigned — confirm.
        wrapGithubConnectorError(error)
      }
      if (!rawSourceText) {
        continue
      }
      materializedConfigObjects.push(await materializeGithubImportedObject({
        connectorInstance: input.connectorInstance,
        connectorMapping: plan.mapping,
        connectorTarget: input.connectorTarget,
        context: input.context,
        externalLocator: path,
        rawSourceText,
        sourceRevisionRef: input.sourceRevisionRef,
      }))
    }
  }

  return materializedConfigObjects
}

/**
 * Returns the organization's existing (non-deleted) plugin with this trimmed
 * name, or creates one. Ordering by createdAt/id makes the pick deterministic
 * when duplicates exist.
 */
async function ensureDiscoveryPlugin(input: { context: PluginArchActorContext; description: string | null; name: string }) {
  const existing = await db
    .select()
    .from(PluginTable)
    .where(and(
      eq(PluginTable.organizationId, input.context.organizationContext.organization.id),
      eq(PluginTable.name, input.name.trim()),
      isNull(PluginTable.deletedAt),
    ))
    .orderBy(asc(PluginTable.createdAt), asc(PluginTable.id))
    .limit(1)

  if (existing[0]) {
    return serializePlugin(existing[0], 0)
  }

  return createPlugin({
    context: input.context,
    description: input.description,
    name: input.name,
  })
}

/**
 * Returns the organization's existing (non-deleted) marketplace with this
 * trimmed name, or creates one. Mirrors ensureDiscoveryPlugin.
 */
async function ensureDiscoveryMarketplace(input: { context: PluginArchActorContext; description: string | null; name: string }) {
  const existing = await db
    .select()
    .from(MarketplaceTable)
    .where(and(
      eq(MarketplaceTable.organizationId, input.context.organizationContext.organization.id),
      eq(MarketplaceTable.name, input.name.trim()),
      isNull(MarketplaceTable.deletedAt),
    ))
    .orderBy(asc(MarketplaceTable.createdAt), asc(MarketplaceTable.id))
    .limit(1)

  if (existing[0]) {
    return serializeMarketplace(existing[0], 0)
  }

  return createMarketplace({
    context: input.context,
    description: input.description,
    name: input.name,
  })
}

/**
 * Returns the existing "path"-kind connector mapping matching
 * (target, objectType, plugin, selector), or creates it with
 * autoAddToPlugin enabled and the discovery source kind recorded in config.
 */
async function ensureDiscoveryMapping(input: {
  connectorTargetId: ConnectorTargetId
  context: PluginArchActorContext
  objectType: ConnectorMappingRow["objectType"]
  pluginId: PluginId
  selector: string
}) {
  const existing = await db
    .select()
    .from(ConnectorMappingTable)
    .where(and(
      eq(ConnectorMappingTable.connectorTargetId, input.connectorTargetId),
      eq(ConnectorMappingTable.mappingKind, "path"),
      eq(ConnectorMappingTable.objectType, input.objectType),
      eq(ConnectorMappingTable.pluginId, input.pluginId),
      eq(ConnectorMappingTable.selector, input.selector),
    ))
    .limit(1)

  if (existing[0]) {
    return serializeConnectorMapping(existing[0])
  }

  return createConnectorMapping({
    autoAddToPlugin: true,
    config: {
      discoverySourceKind: input.objectType,
    },
    connectorTargetId: input.connectorTargetId,
    context: input.context,
    mappingKind: "path",
    objectType: input.objectType,
    pluginId: input.pluginId,
    selector: input.selector,
  })
}

/**
 * Creates a GitHub connector account keyed by the installation id, with an
 * empty repository cache and "all" repository selection as initial metadata.
 */
export async function createGithubConnectorAccount(input: { accountLogin: string; accountType: "Organization" | "User"; context: PluginArchActorContext; displayName: string; installationId: number }) {
  return createConnectorAccount({
    connectorType: "github",
    context: input.context,
    displayName: input.displayName,
    metadata: {
      accountLogin: input.accountLogin,
      accountType: input.accountType,
      repositories: [],
      repositorySelection: "all",
      settingsUrl: null,
    },
    remoteId: String(input.installationId),
  })
}

/**
 * Creates or refreshes the connector account for a GitHub App installation.
 * Fetches the installation summary from GitHub, then either inserts a new
 * account row or merges fresh metadata into the existing one (re-activating
 * it) and returns the account detail.
 */
async function upsertGithubConnectorAccountFromInstallation(input: { context: PluginArchActorContext; installationId: number }) {
  let installation: Awaited>
  try {
    installation = await getGithubInstallationSummary({
      config: githubConnectorAppConfig(),
      installationId: input.installationId,
    })
  } catch (error) {
    // NOTE(review): assumes wrapGithubConnectorError rethrows — confirm.
    wrapGithubConnectorError(error)
  }
  const organizationId = input.context.organizationContext.organization.id
  const existingRows = await db
    .select()
    .from(ConnectorAccountTable)
    .where(and(
      eq(ConnectorAccountTable.organizationId, organizationId),
      eq(ConnectorAccountTable.connectorType, "github"),
      eq(ConnectorAccountTable.remoteId, String(input.installationId)),
    ))
    .limit(1)

  const metadata = {
    accountLogin: installation.accountLogin,
    accountType: installation.accountType,
    repositories: [],
    repositorySelection: installation.repositorySelection,
    settingsUrl: installation.settingsUrl,
  }

  if (!existingRows[0]) {
    return createConnectorAccount({
      connectorType: "github",
      context: input.context,
      displayName: installation.displayName,
      externalAccountRef: installation.accountLogin,
      metadata,
      remoteId: String(input.installationId),
    })
  }

  // Merge: existing metadata keys are kept unless overwritten by the fresh
  // summary (note `repositories` is reset to [] here by the spread).
  await db.update(ConnectorAccountTable).set({
    displayName: installation.displayName,
    externalAccountRef: installation.accountLogin,
    metadataJson: {
      ...(existingRows[0].metadataJson ?? {}),
      ...metadata,
    },
    status: "active",
    updatedAt: new Date(),
  }).where(eq(ConnectorAccountTable.id, existingRows[0].id))

  return getConnectorAccountDetail(input.context, existingRows[0].id)
}

/**
 * Starts the GitHub App install flow: validates the post-install return path
 * (must be a relative path starting with a single "/"), signs a state token
 * binding org + user, and returns the GitHub install URL plus the state.
 */
export async function startGithubConnectorInstall(input: { context: PluginArchActorContext; returnPath: string }) {
  const returnPath = input.returnPath.trim()
  // Reject absolute/protocol-relative URLs ("//host") to prevent open redirects.
  if (!returnPath.startsWith("/") || returnPath.startsWith("//")) {
    throw new PluginArchRouteFailure(400, "invalid_return_path", "GitHub install return path must be a safe relative path.")
  }

  let app: Awaited>
  try {
    app = await getGithubAppSummary({ config: githubConnectorAppConfig() })
  } catch (error) {
    wrapGithubConnectorError(error)
  }
  const state = createGithubInstallStateToken({
    orgId: input.context.organizationContext.organization.id,
    returnPath,
    secret: env.betterAuthSecret,
    userId: input.context.organizationContext.currentMember.userId,
  })

  return {
    redirectUrl: buildGithubAppInstallUrl({ app, state }),
    state,
  }
}

/**
 * Completes the install flow after GitHub redirects back: verifies the signed
 * state matches the current org and user, then upserts the connector account
 * for the installation. Repositories are intentionally not fetched here.
 */
export async function completeGithubConnectorInstall(input: { context: PluginArchActorContext; installationId: number; state: string }) {
  const parsedState = consumeGithubInstallState(input.state)
  if (parsedState.orgId !== input.context.organizationContext.organization.id) {
    throw new PluginArchRouteFailure(409, "github_install_org_mismatch", "GitHub install state does not match the current organization.")
  }
  if (parsedState.userId !== input.context.organizationContext.currentMember.userId) {
    throw new PluginArchRouteFailure(409, "github_install_user_mismatch", "GitHub install state does not match the current user.")
  }

  const connectorAccount = await upsertGithubConnectorAccountFromInstallation({
    context: input.context,
    installationId: input.installationId,
  })

  return {
    connectorAccount,
    // Keep install completion fast. The connected-account screen loads repositories next.
    repositories: [],
  }
}

// NOTE(review): generic type arguments appear stripped in this extract
// (bare `Record`, `ReturnType`, `Array>`, `Awaited>`); restore from the
// original source. Tokens below are otherwise preserved.

/**
 * Returns the cached discovery snapshot for a connector instance, plus
 * derived UI steps built from the classification and discovered plugins.
 */
export async function getGithubConnectorDiscovery(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext }) {
  const discovery = await resolveGithubConnectorDiscovery(input)
  return {
    autoImportNewPlugins: discovery.autoImportNewPlugins,
    classification: discovery.cache.classification,
    connectorInstance: discovery.connectorInstance,
    connectorTarget: discovery.connectorTarget,
    discoveredPlugins: discovery.cache.discoveredPlugins,
    repositoryFullName: discovery.cache.repositoryFullName,
    sourceRevisionRef: discovery.cache.sourceRevisionRef,
    steps: buildGithubConnectorDiscoverySteps({
      classification: discovery.cache.classification,
      discoveredPlugins: discovery.cache.discoveredPlugins,
    }),
    treeSummary: discovery.cache.treeSummary,
    warnings: discovery.cache.warnings,
  }
}

/**
 * Pages through the discovered repository tree (recomputed, not cached),
 * optionally filtered by path prefix.
 */
export async function getGithubConnectorDiscoveryTree(input: { connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext; cursor?: string; limit?: number; prefix?: string }) {
  const discovery = await computeGithubConnectorDiscovery({ connectorInstanceId: input.connectorInstanceId, context: input.context })
  return pagedGithubDiscoveryTree({
    cursor: input.cursor,
    entries: discovery.treeEntries,
    limit: input.limit,
    prefix: input.prefix,
  })
}

/**
 * Applies a discovery selection: persists the auto-import flag on the
 * connector instance, optionally creates a marketplace (for marketplace
 * repos), ensures a plugin + path mappings per selected discovered plugin,
 * then materializes the planned files into config objects.
 * Only supported plugins whose keys were selected are processed.
 */
export async function applyGithubConnectorDiscovery(input: { autoImportNewPlugins: boolean; connectorInstanceId: ConnectorInstanceId; context: PluginArchActorContext; selectedKeys: string[] }) {
  const discovery = await resolveGithubConnectorDiscovery({ connectorInstanceId: input.connectorInstanceId, context: input.context })
  const selectedKeySet = new Set(input.selectedKeys.map((key) => key.trim()).filter(Boolean))
  const selectedPlugins = discovery.cache.discoveredPlugins.filter((plugin) => plugin.supported && selectedKeySet.has(plugin.key))
  await db.update(ConnectorInstanceTable).set({
    instanceConfigJson: {
      ...((discovery.connectorInstance.instanceConfigJson && typeof discovery.connectorInstance.instanceConfigJson === "object")
        ? discovery.connectorInstance.instanceConfigJson as Record
        : {}),
      autoImportNewPlugins: input.autoImportNewPlugins,
    },
    updatedAt: new Date(),
  }).where(eq(ConnectorInstanceTable.id, discovery.connectorInstance.id))

  const marketplaceInfo = discovery.cache.marketplace
  // `||` (not `??`) is deliberate here: an empty/whitespace name falls back
  // to the repository full name.
  const marketplaceName = marketplaceInfo?.name?.trim() || discovery.cache.repositoryFullName
  const marketplaceDescription = marketplaceInfo?.description?.trim()
    ?? `Imported from GitHub marketplace repository ${discovery.cache.repositoryFullName}.`
  const createdMarketplace = discovery.cache.classification === "claude_marketplace_repo"
    ? await ensureDiscoveryMarketplace({
      context: input.context,
      description: marketplaceDescription,
      name: marketplaceName,
    })
    : null

  const plugins = [] as Array>
  const mappings = [] as Array>
  const importPlans = [] as Array<{ mapping: ReturnType; paths: string[] }>
  for (const discoveredPlugin of selectedPlugins) {
    const plugin = await ensureDiscoveryPlugin({
      context: input.context,
      description: discoveredPlugin.description,
      name: discoveredPlugin.displayName,
    })
    plugins.push(plugin)

    if (createdMarketplace) {
      await attachPluginToMarketplace({
        context: input.context,
        marketplaceId: createdMarketplace.id,
        membershipSource: "connector",
        pluginId: plugin.id,
      })
    }

    for (const plan of discovery.cache.importPlansByPluginKey[discoveredPlugin.key] ?? []) {
      const mapping = await ensureDiscoveryMapping({
        connectorTargetId: discovery.connectorTarget.id,
        context: input.context,
        objectType: plan.objectType,
        pluginId: plugin.id,
        selector: plan.selector,
      })
      mappings.push(mapping)
      importPlans.push({ mapping, paths: plan.paths })
    }
  }

  const materializedConfigObjects = await materializeGithubImportPlans({
    connectorInstance: discovery.connectorInstance,
    connectorTarget: discovery.connectorTarget,
    context: input.context,
    importPlans,
    sourceRevisionRef: discovery.cache.sourceRevisionRef,
  })

  return {
    autoImportNewPlugins: input.autoImportNewPlugins,
    createdMarketplace,
    connectorInstance: discovery.connectorInstance,
    connectorTarget: discovery.connectorTarget,
    createdPlugins: plugins,
    createdMappings: mappings,
    materializedConfigObjects,
    sourceRevisionRef: discovery.cache.sourceRevisionRef,
  }
}

/**
 * Lists repositories for a GitHub connector account's installation, refreshes
 * the account's repository metadata cache as a side effect, then returns a
 * filtered (by substring query) and cursor-paged result.
 */
export async function listGithubRepositories(input: { connectorAccountId: ConnectorAccountId; context: PluginArchActorContext; cursor?: string; limit?: number; q?: string }) {
  const account = await getConnectorAccountRow(input.context.organizationContext.organization.id, input.connectorAccountId)
  if (!account) {
    throw new PluginArchRouteFailure(404, "connector_account_not_found", "Connector account not found.")
  }
  if (account.connectorType !== "github") {
    throw new PluginArchRouteFailure(409, "github_connector_account_required", "Connector account is not a GitHub account.")
  }

  const installationId = Number(account.remoteId)
  if (!Number.isFinite(installationId) || installationId <= 0) {
    throw new PluginArchRouteFailure(409, "invalid_github_installation_id", "Connector account does not have a valid GitHub installation id.")
  }

  let repositories: RepositorySummary[]
  let installationSummary: Awaited>
  try {
    repositories = await listGithubInstallationRepositories({
      config: githubConnectorAppConfig(),
      installationId,
    })
    installationSummary = await getGithubInstallationSummary({
      config: githubConnectorAppConfig(),
      installationId,
    })
  } catch (error) {
    // NOTE(review): assumes wrapGithubConnectorError rethrows — confirm.
    wrapGithubConnectorError(error)
  }

  // Cache the freshly listed repositories on the account row.
  const existingMetadata = account.metadataJson && typeof account.metadataJson === "object"
    ? account.metadataJson as Record
    : {}
  await db.update(ConnectorAccountTable).set({
    metadataJson: {
      ...existingMetadata,
      repositories: repositories.map((repository) => ({
        defaultBranch: repository.defaultBranch,
        fullName: repository.fullName,
        hasPluginManifest: repository.hasPluginManifest ?? false,
        id: repository.id,
        manifestKind: repository.manifestKind ?? null,
        marketplacePluginCount: repository.marketplacePluginCount ?? null,
        private: repository.private,
      })),
      repositorySelection: installationSummary.repositorySelection,
      settingsUrl: installationSummary.settingsUrl,
    },
    updatedAt: new Date(),
  }).where(eq(ConnectorAccountTable.id, account.id))

  // Case-insensitive match against "fullName\ndefaultBranch"; ids are
  // stringified for paging and converted back to numbers on output.
  const filtered = repositories
    .filter((repository) => !input.q || `${repository.fullName}\n${repository.defaultBranch ?? ""}`.toLowerCase().includes(input.q.toLowerCase()))
    .map((repository) => ({ ...repository, id: String(repository.id) }))
  const page = pageItems(filtered, input.cursor, input.limit)
  return {
    items: page.items.map((repository) => ({
      defaultBranch: repository.defaultBranch,
      fullName: repository.fullName,
      hasPluginManifest: Boolean(repository.hasPluginManifest),
      id: Number(repository.id),
      manifestKind: repository.manifestKind ?? null,
      marketplacePluginCount: repository.marketplacePluginCount ?? null,
      private: repository.private,
    })),
    nextCursor: page.nextCursor,
  }
}

/**
 * Validates a repository/branch/ref against a GitHub installation, wrapping
 * GitHub errors into connector failures. Config and token are optional and
 * default to the app config / a fresh token inside the callee.
 */
export async function validateGithubTarget(input: {
  branch: string
  config?: ReturnType
  installationId: number
  ref: string
  repositoryFullName: string
  repositoryId: number
  token?: string
}) {
  try {
    return await validateGithubInstallationTarget({
      branch: input.branch,
      config: input.config ?? githubConnectorAppConfig(),
      installationId: input.installationId,
      ref: input.ref,
      repositoryFullName: input.repositoryFullName,
      repositoryId: input.repositoryId,
      token: input.token,
    })
  } catch (error) {
    // NOTE(review): if wrapGithubConnectorError does not throw, this function
    // returns undefined — confirm callers handle that (or that it rethrows).
    wrapGithubConnectorError(error)
  }
}

/**
 * End-to-end GitHub connector setup: validates the target, computes a
 * discovery snapshot, ensures a connector account (creating one named after
 * the repository when none is given/found), creates the connector instance
 * and target (with the discovery snapshot cached in target config), then
 * creates the requested mappings.
 */
export async function githubSetup(input: {
  branch: string
  connectorAccountId?: ConnectorAccountId
  connectorInstanceName: string
  context: PluginArchActorContext
  installationId: number
  mappings: Array<{ autoAddToPlugin: boolean; config?: Record; mappingKind: ConnectorMappingRow["mappingKind"]; objectType: ConnectorMappingRow["objectType"]; pluginId?: PluginId | null; selector: string }>
  ref: string
  repositoryFullName: string
  repositoryId: number
}) {
  const githubConfig = githubConnectorAppConfig()
  const installationToken = await getGithubInstallationAccessToken({
    config: githubConfig,
    installationId: input.installationId,
  })
  const validation = await validateGithubTarget({
    branch: input.branch,
    config: githubConfig,
    installationId: input.installationId,
    ref: input.ref,
    repositoryFullName: input.repositoryFullName,
    repositoryId: input.repositoryId,
    token: installationToken,
  })
  if (!validation.repositoryAccessible) {
    throw new PluginArchRouteFailure(409, "github_repository_not_accessible", "GitHub repository is not accessible for this installation.")
  }
  if (!validation.branchExists) {
    throw new PluginArchRouteFailure(409, "github_branch_not_found", "GitHub branch/ref could not be validated for this repository.")
  }

  const discovery = await computeGithubDiscoverySnapshot({
    branch: input.branch,
    installationId: input.installationId,
    ref: input.ref,
    repositoryFullName: input.repositoryFullName,
    token: installationToken,
  })

  let connectorAccountId = input.connectorAccountId as ConnectorAccountId | undefined
  let connectorAccountDetail = connectorAccountId ? await getConnectorAccountDetail(input.context, connectorAccountId) : null
  if (!connectorAccountId || !connectorAccountDetail) {
    connectorAccountDetail = await createGithubConnectorAccount({
      // Owner segment of "owner/repo"; accountType defaults to Organization
      // here — NOTE(review): user-owned installs would be mislabeled, confirm.
      accountLogin: input.repositoryFullName.split("/")[0] ?? input.repositoryFullName,
      accountType: "Organization",
      context: input.context,
      displayName: input.repositoryFullName,
      installationId: input.installationId,
    })
    connectorAccountId = connectorAccountDetail.id
  }

  const connectorInstance = await createConnectorInstance({
    connectorAccountId,
    connectorType: "github",
    config: {
      autoImportNewPlugins: true,
      installationId: input.installationId,
    },
    context: input.context,
    name: input.connectorInstanceName,
    remoteId: input.repositoryFullName,
  })

  const connectorTarget = await createConnectorTarget({
    config: withGithubDiscoveryCache({
      branch: input.branch,
      defaultBranch: validation.defaultBranch,
      ref: input.ref,
      repositoryFullName: input.repositoryFullName,
      repositoryId: input.repositoryId,
    }, {
      branch: discovery.branch,
      classification: discovery.classification,
      discoveredPlugins: discovery.discoveredPlugins,
      importPlansByPluginKey: discovery.importPlansByPluginKey,
      marketplace: discovery.marketplace,
      ref: discovery.ref,
      repositoryFullName: discovery.repositoryFullName,
      sourceRevisionRef: discovery.sourceRevisionRef,
      treeSummary: discovery.treeSummary,
      warnings: discovery.warnings,
    }),
    connectorInstanceId: connectorInstance.id,
    connectorType: "github",
    context: input.context,
    externalTargetRef: input.branch,
    remoteId: input.repositoryFullName,
    targetKind: "repository_branch",
  })

  for (const mapping of input.mappings) {
    await createConnectorMapping({
      autoAddToPlugin: mapping.autoAddToPlugin,
      config: mapping.config,
      connectorTargetId: connectorTarget.id,
      context: input.context,
      mappingKind: mapping.mappingKind,
      objectType: mapping.objectType,
      pluginId: mapping.pluginId,
      selector: mapping.selector,
    })
  }

  return {
    connectorAccount: connectorAccountDetail,
    connectorInstance,
    connectorTarget,
  }
}

/**
 * Handles an incoming GitHub webhook delivery.
 * - Non-push events: only "installation deleted" is acted on (accounts for
 *   that installation are marked disconnected); everything else is ignored.
 * - Push events: for each connector target bound to the repository (and, if
 *   the target pins a ref, matching that ref), attempts an auto-import and
 *   records/updates a sync event keyed by head SHA (idempotent per commit).
 * Returns an accepted/queued summary; never throws for per-row auto-import
 * failures.
 * NOTE(review): `payload: Record` has its generic arguments stripped in this
 * extract — restore from the original source.
 */
export async function enqueueGithubWebhookSync(input: {
  deliveryId: string
  event: "installation" | "installation_repositories" | "push" | "repository"
  headSha?: string
  installationId?: number
  payload: Record
  ref?: string
  repositoryFullName?: string
  repositoryId?: number
}) {
  if (!input.installationId) {
    return { accepted: false as const, reason: "missing installation id" }
  }

  const accounts = await db
    .select()
    .from(ConnectorAccountTable)
    .where(and(eq(ConnectorAccountTable.connectorType, "github"), eq(ConnectorAccountTable.remoteId, String(input.installationId))))

  if (input.event !== "push") {
    if (input.event === "installation") {
      const action = typeof input.payload.action === "string" ? input.payload.action : null
      if (action === "deleted") {
        // App uninstalled: mark every matching account disconnected.
        for (const account of accounts) {
          await db.update(ConnectorAccountTable).set({ status: "disconnected", updatedAt: new Date() }).where(eq(ConnectorAccountTable.id, account.id))
        }
        return { accepted: true as const, queued: false as const }
      }
    }
    return { accepted: false as const, reason: "event ignored" }
  }

  if (!input.repositoryFullName || !input.ref || !input.headSha || !input.repositoryId) {
    return { accepted: false as const, reason: "missing push metadata" }
  }

  const instances = await db
    .select({ instance: ConnectorInstanceTable, target: ConnectorTargetTable })
    .from(ConnectorTargetTable)
    .innerJoin(ConnectorInstanceTable, eq(ConnectorTargetTable.connectorInstanceId, ConnectorInstanceTable.id))
    .where(and(eq(ConnectorTargetTable.connectorType, "github"), eq(ConnectorTargetTable.remoteId, input.repositoryFullName)))

  const queuedIds: string[] = []
  for (const row of instances) {
    const targetConfig = row.target.targetConfigJson ?? {}
    const targetRef = typeof targetConfig.ref === "string" ? targetConfig.ref : null
    // Skip targets pinned to a different ref than the push.
    if (targetRef && targetRef !== input.ref) {
      continue
    }

    // Dedupe: reuse the sync event for this target + head SHA if one exists.
    const existing = await db
      .select({ id: ConnectorSyncEventTable.id })
      .from(ConnectorSyncEventTable)
      .where(and(
        eq(ConnectorSyncEventTable.connectorTargetId, row.target.id),
        eq(ConnectorSyncEventTable.eventType, "push"),
        eq(ConnectorSyncEventTable.sourceRevisionRef, input.headSha),
      ))
      .limit(1)

    let autoImportSummary: {
      autoImported: boolean
      createdPluginCount: number
      materializedConfigObjectCount: number
    }
    try {
      autoImportSummary = await maybeAutoImportGithubConnectorInstance({
        connectorInstance: row.instance,
        connectorTarget: row.target,
      })
    } catch (error) {
      // Best-effort: a failed auto-import degrades to a queued event rather
      // than failing the webhook. NOTE(review): the error is discarded —
      // consider logging it for diagnosability.
      autoImportSummary = {
        autoImported: false,
        createdPluginCount: 0,
        materializedConfigObjectCount: 0,
      }
    }

    const eventStatus = autoImportSummary.autoImported ? "completed" as const : "queued" as const
    const completedAt = autoImportSummary.autoImported ? new Date() : null

    const id = existing[0]?.id ?? createDenTypeId("connectorSyncEvent")
    if (existing[0]) {
      await db.update(ConnectorSyncEventTable).set({
        completedAt,
        externalEventRef: input.deliveryId,
        startedAt: new Date(),
        status: eventStatus,
        summaryJson: {
          autoImportApplied: autoImportSummary.autoImported,
          autoImportCreatedPluginCount: autoImportSummary.createdPluginCount,
          autoImportMaterializedConfigObjectCount: autoImportSummary.materializedConfigObjectCount,
          deliveryId: input.deliveryId,
          headSha: input.headSha,
          repositoryFullName: input.repositoryFullName,
          repositoryId: input.repositoryId,
          queuedAt: new Date().toISOString(),
          ref: input.ref,
        },
      }).where(eq(ConnectorSyncEventTable.id, id))
    } else {
      await db.insert(ConnectorSyncEventTable).values({
        completedAt,
        connectorInstanceId: row.instance.id,
        connectorTargetId: row.target.id,
        connectorType: "github",
        eventType: "push",
        externalEventRef: input.deliveryId,
        id,
        organizationId: row.instance.organizationId,
        remoteId: input.repositoryFullName,
        sourceRevisionRef: input.headSha,
        startedAt: new Date(),
        status: eventStatus,
        summaryJson: {
          autoImportApplied: autoImportSummary.autoImported,
          autoImportCreatedPluginCount: autoImportSummary.createdPluginCount,
          autoImportMaterializedConfigObjectCount: autoImportSummary.materializedConfigObjectCount,
          deliveryId: input.deliveryId,
          headSha: input.headSha,
          installationId: input.installationId,
          repositoryFullName: input.repositoryFullName,
          repositoryId: input.repositoryId,
          ref: input.ref,
        },
      })
    }
    queuedIds.push(id)
  }

  return queuedIds.length > 0
    ? { accepted: true as const, queued: true as const, syncEventIds: queuedIds }
    : { accepted: false as const, reason: "event ignored" }
}
diff --git a/ee/apps/den-api/src/routes/org/roles.ts b/ee/apps/den-api/src/routes/org/roles.ts new file mode 100644 index 0000000000..3d3a692f88 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/roles.ts @@ -0,0 +1,257 @@
import { and, eq } from "@openwork-ee/den-db/drizzle"
import { InvitationTable, MemberTable, OrganizationRoleTable } from "@openwork-ee/den-db/schema"
import { normalizeDenTypeId } from "@openwork-ee/utils/typeid"
import type { Hono } from "hono"
import { describeRoute } from "hono-openapi"
import { z } from "zod"
import { db } from "../../db.js"
import { jsonValidator, paramValidator, requireUserMiddleware, resolveOrganizationContextMiddleware } from "../../middleware/index.js"
import { emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, successSchema, unauthorizedSchema } from "../../openapi.js"
import { serializePermissionRecord } from "../../orgs.js"
import type { OrgRouteVariables } from "./shared.js"
import { createRoleId, ensureOwner, idParamSchema, normalizeRoleName, replaceRoleValue, splitRoles } from "./shared.js"

// Permission map: resource name -> list of allowed actions.
const permissionSchema = z.record(z.string(), z.array(z.string()))

const createRoleSchema = z.object({
  roleName: z.string().trim().min(2).max(64),
  permission: permissionSchema,
})

const updateRoleSchema = z.object({
  roleName: z.string().trim().min(2).max(64).optional(),
  permission: permissionSchema.optional(),
})

type OrganizationRoleId = typeof OrganizationRoleTable.$inferSelect.id
const orgRoleParamsSchema = idParamSchema("roleId", "organizationRole")

/**
 * Registers the custom organization-role CRUD routes (create/update/delete).
 * All routes require a signed-in user with the owner role; "owner" itself is
 * system-managed and can never be created, renamed to, or (by name) reused.
 * NOTE(review): `Hono` here originally carried generic variables (stripped in
 * this extract) — restore from the original source.
 */
export function registerOrgRoleRoutes(app: Hono) {
  app.post(
    "/v1/roles",
    describeRoute({
      tags: ["Roles"],
      summary: "Create organization role",
      description: "Creates a custom organization role with a named permission map.",
      responses: {
        201:
        jsonResponse("Organization role created successfully.", successSchema),
        400: jsonResponse("The role creation request was invalid.", invalidRequestSchema),
        401: jsonResponse("The caller must be signed in to create organization roles.", unauthorizedSchema),
        403: jsonResponse("Only workspace owners can create custom roles.", forbiddenSchema),
        404: jsonResponse("The organization could not be found.", notFoundSchema),
      },
    }),
    requireUserMiddleware,
    resolveOrganizationContextMiddleware,
    jsonValidator(createRoleSchema),
    async (c) => {
      // Owner-only operation.
      const permission = ensureOwner(c)
      if (!permission.ok) {
        return c.json(permission.response, 403)
      }

      const payload = c.get("organizationContext")
      const input = c.req.valid("json")

      const roleName = normalizeRoleName(input.roleName)
      // "owner" is reserved for the system-managed role.
      if (roleName === "owner") {
        return c.json({ error: "invalid_role", message: "Owner is managed by the system." }, 400)
      }

      // Enforce per-organization role-name uniqueness.
      const existingByName = await db
        .select({ id: OrganizationRoleTable.id })
        .from(OrganizationRoleTable)
        .where(and(eq(OrganizationRoleTable.organizationId, payload.organization.id), eq(OrganizationRoleTable.role, roleName)))
        .limit(1)

      if (existingByName[0]) {
        return c.json({ error: "role_exists", message: "That role already exists in this organization." }, 409)
      }

      await db.insert(OrganizationRoleTable).values({
        id: createRoleId(),
        organizationId: payload.organization.id,
        role: roleName,
        permission: serializePermissionRecord(input.permission),
      })

      return c.json({ success: true }, 201)
    },
  )

  app.patch(
    "/v1/roles/:roleId",
    describeRoute({
      tags: ["Roles"],
      summary: "Update organization role",
      description: "Updates a custom organization role and propagates role name changes to members and pending invitations.",
      responses: {
        200: jsonResponse("Organization role updated successfully.", successSchema),
        400: jsonResponse("The role update request was invalid.", invalidRequestSchema),
        401: jsonResponse("The caller must be signed in to update organization roles.", unauthorizedSchema),
        403: jsonResponse("Only workspace owners can update custom roles.", forbiddenSchema),
        404: jsonResponse("The role or organization could not be found.", notFoundSchema),
      },
    }),
    requireUserMiddleware,
    paramValidator(orgRoleParamsSchema),
    resolveOrganizationContextMiddleware,
    jsonValidator(updateRoleSchema),
    async (c) => {
      const permission = ensureOwner(c)
      if (!permission.ok) {
        return c.json(permission.response, 403)
      }

      const payload = c.get("organizationContext")
      const input = c.req.valid("json")

      const params = c.req.valid("param")
      let roleId: OrganizationRoleId
      try {
        // Malformed typed ids are treated as not-found, not bad-request.
        roleId = normalizeDenTypeId("organizationRole", params.roleId)
      } catch {
        return c.json({ error: "role_not_found" }, 404)
      }

      const roleRows = await db
        .select()
        .from(OrganizationRoleTable)
        .where(and(eq(OrganizationRoleTable.id, roleId), eq(OrganizationRoleTable.organizationId, payload.organization.id)))
        .limit(1)

      const roleRow = roleRows[0]
      if (!roleRow) {
        return c.json({ error: "role_not_found" }, 404)
      }

      const nextRoleName = input.roleName ? normalizeRoleName(input.roleName) : roleRow.role
      if (nextRoleName === "owner") {
        return c.json({ error: "invalid_role", message: "Owner is managed by the system." }, 400)
      }

      // Renames must not collide with another role in the organization.
      if (nextRoleName !== roleRow.role) {
        const duplicate = await db
          .select({ id: OrganizationRoleTable.id })
          .from(OrganizationRoleTable)
          .where(and(eq(OrganizationRoleTable.organizationId, payload.organization.id), eq(OrganizationRoleTable.role, nextRoleName)))
          .limit(1)
        if (duplicate[0]) {
          return c.json({ error: "role_exists", message: "That role name is already in use." }, 409)
        }
      }

      const nextPermission = input.permission ? serializePermissionRecord(input.permission) : roleRow.permission

      await db
        .update(OrganizationRoleTable)
        .set({ role: nextRoleName, permission: nextPermission })
        .where(eq(OrganizationRoleTable.id, roleRow.id))

      // On rename, rewrite the comma-separated role lists stored on members
      // and pending invitations so they keep pointing at the renamed role.
      // NOTE(review): these updates are not wrapped in a transaction with the
      // role update above — confirm partial-failure behavior is acceptable.
      if (nextRoleName !== roleRow.role) {
        const members = await db
          .select()
          .from(MemberTable)
          .where(eq(MemberTable.organizationId, payload.organization.id))

        for (const member of members) {
          if (!splitRoles(member.role).includes(roleRow.role)) {
            continue
          }

          await db
            .update(MemberTable)
            .set({ role: replaceRoleValue(member.role, roleRow.role, nextRoleName) })
            .where(eq(MemberTable.id, member.id))
        }

        const invitations = await db
          .select()
          .from(InvitationTable)
          .where(eq(InvitationTable.organizationId, payload.organization.id))

        for (const invitation of invitations) {
          if (!splitRoles(invitation.role).includes(roleRow.role)) {
            continue
          }

          await db
            .update(InvitationTable)
            .set({ role: replaceRoleValue(invitation.role, roleRow.role, nextRoleName) })
            .where(eq(InvitationTable.id, invitation.id))
        }
      }

      return c.json({ success: true })
    },
  )

  app.delete(
    "/v1/roles/:roleId",
    describeRoute({
      tags: ["Roles"],
      summary: "Delete organization role",
      description: "Deletes a custom organization role after confirming that no members or pending invitations still depend on it.",
      responses: {
        204: emptyResponse("Organization role deleted successfully."),
        400: jsonResponse("The role deletion request was invalid.", invalidRequestSchema),
        401: jsonResponse("The caller must be signed in to delete organization roles.", unauthorizedSchema),
        403: jsonResponse("Only workspace owners can delete custom roles.", forbiddenSchema),
        404: jsonResponse("The role or organization could not be found.", notFoundSchema),
      },
    }),
    requireUserMiddleware,
    paramValidator(orgRoleParamsSchema),
    resolveOrganizationContextMiddleware,
    async (c) => {
      const permission = ensureOwner(c)
      if (!permission.ok) {
        return c.json(permission.response, 403)
      }

      const payload = c.get("organizationContext")
      const params = c.req.valid("param")
      let roleId: OrganizationRoleId
      try {
        roleId = normalizeDenTypeId("organizationRole", params.roleId)
      } catch {
        return c.json({ error: "role_not_found" }, 404)
      }

      const roleRows = await db
        .select()
        .from(OrganizationRoleTable)
        .where(and(eq(OrganizationRoleTable.id, roleId), eq(OrganizationRoleTable.organizationId, payload.organization.id)))
        .limit(1)

      const roleRow = roleRows[0]
      if (!roleRow) {
        return c.json({ error: "role_not_found" }, 404)
      }

      // Deletion is blocked while any member still carries the role...
      const membersUsingRole = await db
        .select({ role: MemberTable.role })
        .from(MemberTable)
        .where(eq(MemberTable.organizationId, payload.organization.id))

      if (membersUsingRole.some((member) => splitRoles(member.role).includes(roleRow.role))) {
        return c.json({ error: "role_in_use", message: "Update members using this role before deleting it." }, 400)
      }

      // ...or any pending invitation references it.
      const invitationsUsingRole = await db
        .select({ role: InvitationTable.role })
        .from(InvitationTable)
        .where(eq(InvitationTable.organizationId, payload.organization.id))

      if (invitationsUsingRole.some((invitation) => splitRoles(invitation.role).includes(roleRow.role))) {
        return c.json({
          error: "role_in_use",
          message: "Cancel or update pending invitations using this role before deleting it.",
        }, 400)
      }

      await db.delete(OrganizationRoleTable).where(eq(OrganizationRoleTable.id, roleRow.id))
      return c.body(null, 204)
    },
  )
}
diff --git a/ee/apps/den-api/src/routes/org/shared.ts b/ee/apps/den-api/src/routes/org/shared.ts new file mode 100644 index 0000000000..ef376705fb --- /dev/null +++ b/ee/apps/den-api/src/routes/org/shared.ts @@ -0,0 +1,156 @@
import { createDenTypeId, type DenTypeIdName } from "@openwork-ee/utils/typeid"
import { z } from "zod"
import type { MemberTeamsContext, OrganizationContextVariables, UserOrganizationsContext } from "../../middleware/index.js"
import { env } from "../../env.js"
import { denTypeIdSchema } from "../../openapi.js"
import type { AuthContextVariables } from "../../session.js"

// Hono context variables shared by the org routes: auth context plus the
// optional org/member/team contexts attached by the middlewares.
// NOTE(review): the `Partial<...>` type arguments were stripped in this
// extract — restore from the original source.
export type OrgRouteVariables =
  & AuthContextVariables
  & Partial
  & Partial
  & Partial

/**
 * Builds a zod params schema for a single path parameter named `key`.
 * With a `typeName`, the value is validated as that typed den id; without
 * one, it is validated as a plain non-empty string (max 255 chars).
 * NOTE(review): the generic parameter list (`K extends string`, the
 * `Record<...>` casts) was stripped in this extract — restore before use.
 */
export function idParamSchema(key: K, typeName?: DenTypeIdName) {
  if (!typeName) {
    return z.object({
      [key]: z.string().trim().min(1).max(255),
    } as unknown as Record)
  }

  return z.object({
    [key]: denTypeIdSchema(typeName),
  } as unknown as Record>)
}

/** Splits a comma-separated role list into trimmed, non-empty role names. */
export function splitRoles(value: string) {
  return value
    .split(",")
    .map((entry) => entry.trim())
    .filter(Boolean)
}

/** True when the comma-separated role list `value` contains `role` exactly. */
export function memberHasRole(value: string, role: string) {
  return splitRoles(value).includes(role)
}

/** Canonicalizes a role name: trimmed, lowercase, whitespace runs -> "-". */
export function normalizeRoleName(value: string) {
  return value
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
}

export function replaceRoleValue(value: string, previousRole: string, nextRole: string |
null) { + const existing = splitRoles(value) + const remaining = existing.filter((role) => role !== previousRole) + + if (nextRole && !remaining.includes(nextRole)) { + remaining.push(nextRole) + } + + return remaining[0] ? remaining.join(",") : "member" +} + +export function getInvitationOrigin() { + return env.betterAuthTrustedOrigins.find((origin) => origin !== "*") ?? env.betterAuthUrl +} + +export function buildInvitationLink(invitationId: string) { + return new URL(`/join-org?invite=${encodeURIComponent(invitationId)}`, getInvitationOrigin()).toString() +} + +export function ensureOwner(c: { get: (key: "organizationContext") => OrgRouteVariables["organizationContext"] }) { + const payload = c.get("organizationContext") + if (!payload?.currentMember.isOwner) { + return { + ok: false as const, + response: { + error: "forbidden", + message: "Only workspace owners can manage members and roles.", + }, + } + } + + return { ok: true as const } +} + +export function ensureInviteManager(c: { get: (key: "organizationContext") => OrgRouteVariables["organizationContext"] }) { + const payload = c.get("organizationContext") + if (!payload) { + return { + ok: false as const, + response: { + error: "organization_not_found", + }, + } + } + + if (payload.currentMember.isOwner || memberHasRole(payload.currentMember.role, "admin")) { + return { ok: true as const } + } + + return { + ok: false as const, + response: { + error: "forbidden", + message: "Only workspace owners and admins can invite members.", + }, + } +} + +export function ensureTeamManager(c: { get: (key: "organizationContext") => OrgRouteVariables["organizationContext"] }) { + const payload = c.get("organizationContext") + if (!payload) { + return { + ok: false as const, + response: { + error: "organization_not_found", + }, + } + } + + if (payload.currentMember.isOwner || memberHasRole(payload.currentMember.role, "admin")) { + return { ok: true as const } + } + + return { + ok: false as const, + response: { + error: 
"forbidden", + message: "Only workspace owners and admins can manage teams.", + }, + } +} + +export function ensureApiKeyManager(c: { get: (key: "organizationContext") => OrgRouteVariables["organizationContext"] }) { + const payload = c.get("organizationContext") + if (!payload) { + return { + ok: false as const, + response: { + error: "organization_not_found", + }, + } + } + + if (payload.currentMember.isOwner || memberHasRole(payload.currentMember.role, "admin")) { + return { ok: true as const } + } + + return { + ok: false as const, + response: { + error: "forbidden", + message: "Only workspace owners and admins can manage API keys.", + }, + } +} + +export function createInvitationId() { + return createDenTypeId("invitation") +} + +export function createRoleId() { + return createDenTypeId("organizationRole") +} diff --git a/ee/apps/den-api/src/routes/org/skills.ts b/ee/apps/den-api/src/routes/org/skills.ts new file mode 100644 index 0000000000..646f0e45b0 --- /dev/null +++ b/ee/apps/den-api/src/routes/org/skills.ts @@ -0,0 +1,1140 @@ +import { and, desc, eq, inArray, isNotNull, or } from "@openwork-ee/den-db/drizzle" +import { + AuthUserTable, + MemberTable, + SkillHubMemberTable, + SkillHubSkillTable, + SkillHubTable, + SkillTable, + TeamTable, +} from "@openwork-ee/den-db/schema" +import { hasSkillFrontmatterName, parseSkillMarkdown } from "@openwork-ee/utils" +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { + jsonValidator, + paramValidator, + requireUserMiddleware, + resolveMemberTeamsMiddleware, + resolveOrganizationContextMiddleware, +} from "../../middleware/index.js" +import type { MemberTeamsContext } from "../../middleware/member-teams.js" +import { denTypeIdSchema, emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, successSchema, 
unauthorizedSchema } from "../../openapi.js" +import type { OrgRouteVariables } from "./shared.js" +import { idParamSchema, memberHasRole } from "./shared.js" + +const skillTextSchema = z.string().superRefine((value, ctx) => { + if (!value.trim()) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Skill content cannot be empty.", + }) + return + } + + if (!hasSkillFrontmatterName(value)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Skill content must start with frontmatter that includes a name.", + }) + } +}) + +const createSkillSchema = z.object({ + skillText: skillTextSchema, + shared: z.enum(["org", "public"]).nullable().optional(), +}) + +const updateSkillSchema = z.object({ + skillText: skillTextSchema.optional(), + shared: z.enum(["org", "public"]).nullable().optional(), +}).superRefine((value, ctx) => { + if (value.skillText === undefined && value.shared === undefined) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["skillText"], + message: "Provide at least one field to update.", + }) + } +}) + +const createSkillHubSchema = z.object({ + name: z.string().trim().min(1).max(255), + description: z.string().trim().max(65535).nullish().transform((value) => value || null), +}) + +const updateSkillHubSchema = z.object({ + name: z.string().trim().min(1).max(255).optional(), + description: z.string().trim().max(65535).nullable().optional(), +}).superRefine((value, ctx) => { + if (value.name === undefined && value.description === undefined) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["name"], + message: "Provide at least one field to update.", + }) + } +}) + +const addSkillToHubSchema = z.object({ + skillId: denTypeIdSchema("skill"), +}) + +const addSkillHubAccessSchema = z.object({ + orgMembershipId: denTypeIdSchema("member").optional(), + teamId: denTypeIdSchema("team").optional(), +}).superRefine((value, ctx) => { + const count = Number(Boolean(value.orgMembershipId)) + Number(Boolean(value.teamId)) + if (count 
!== 1) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["orgMembershipId"], + message: "Provide exactly one of orgMembershipId or teamId.", + }) + } +}) + +type SkillId = typeof SkillTable.$inferSelect.id +type SkillHubId = typeof SkillHubTable.$inferSelect.id +type SkillHubMemberId = typeof SkillHubMemberTable.$inferSelect.id +type TeamId = typeof TeamTable.$inferSelect.id +type MemberId = typeof MemberTable.$inferSelect.id +type SkillRow = typeof SkillTable.$inferSelect +type SkillHubRow = typeof SkillHubTable.$inferSelect + +const orgSkillHubParamsSchema = idParamSchema("skillHubId", "skillHub") +const orgSkillParamsSchema = idParamSchema("skillId", "skill") +const orgSkillHubSkillParamsSchema = orgSkillHubParamsSchema.extend(idParamSchema("skillId", "skill").shape) +const orgSkillHubAccessParamsSchema = orgSkillHubParamsSchema.extend(idParamSchema("accessId", "skillHubMember").shape) + +const skillResponseSchema = z.object({ + skill: z.object({}).passthrough(), +}).meta({ ref: "SkillResponse" }) + +const skillListResponseSchema = z.object({ + skills: z.array(z.object({}).passthrough()), +}).meta({ ref: "SkillListResponse" }) + +const skillHubResponseSchema = z.object({ + skillHub: z.object({}).passthrough(), +}).meta({ ref: "SkillHubResponse" }) + +const skillHubListResponseSchema = z.object({ + skillHubs: z.array(z.object({}).passthrough()), +}).meta({ ref: "SkillHubListResponse" }) + +const skillHubAccessResponseSchema = z.object({ + access: z.object({}).passthrough(), +}).meta({ ref: "SkillHubAccessResponse" }) + +const conflictSchema = z.object({ + error: z.string(), + message: z.string().optional(), +}).meta({ ref: "ConflictError" }) + +function parseSkillId(value: string) { + return normalizeDenTypeId("skill", value) +} + +function parseSkillHubId(value: string) { + return normalizeDenTypeId("skillHub", value) +} + +function parseSkillHubMemberId(value: string) { + return normalizeDenTypeId("skillHubMember", value) +} + +function 
parseMemberId(value: string) { + return normalizeDenTypeId("member", value) +} + +function parseTeamId(value: string) { + return normalizeDenTypeId("team", value) +} + +function parseSkillMetadata(skillText: string) { + const parsed = parseSkillMarkdown(skillText) + if (parsed.hasFrontmatter) { + const title = parsed.name.trim() || "Untitled skill" + const description = parsed.description.trim() || null + + return { + title: title.slice(0, 255), + description: description ? description.slice(0, 65535) : null, + } + } + + const lines = skillText + .split(/\r?\n/g) + .map((line) => line.trim()) + .filter(Boolean) + + const cleanup = (value: string) => value + .replace(/^#{1,6}\s+/, "") + .replace(/^[-*+]\s+/, "") + .replace(/^title\s*:\s*/i, "") + .replace(/^description\s*:\s*/i, "") + .trim() + + const title = cleanup(lines[0] ?? "") || "Untitled skill" + const description = lines.slice(1).map(cleanup).find(Boolean) ?? null + + return { + title: title.slice(0, 255), + description: description ? 
description.slice(0, 65535) : null, + } +} + +function isOrganizationAdmin(payload: { currentMember: { isOwner: boolean; role: string } }) { + return payload.currentMember.isOwner || memberHasRole(payload.currentMember.role, "admin") +} + +function canManageSkill(payload: { currentMember: { id: MemberId; isOwner: boolean; role: string } }, skill: SkillRow) { + return isOrganizationAdmin(payload) || skill.createdByOrgMembershipId === payload.currentMember.id +} + +function canManageHub(payload: { currentMember: { id: MemberId; isOwner: boolean; role: string } }, skillHub: SkillHubRow) { + return isOrganizationAdmin(payload) || skillHub.createdByOrgMembershipId === payload.currentMember.id +} + +async function listAccessibleHubMemberships(input: { + organizationId: typeof SkillHubTable.$inferSelect.organizationId + currentMemberId: MemberId + memberTeams: Array<{ id: TeamId }> +}) { + const teamIds = input.memberTeams.map((team) => team.id) + const accessWhere = teamIds.length > 0 + ? and( + eq(SkillHubTable.organizationId, input.organizationId), + or( + eq(SkillHubMemberTable.orgMembershipId, input.currentMemberId), + inArray(SkillHubMemberTable.teamId, teamIds), + ), + ) + : and( + eq(SkillHubTable.organizationId, input.organizationId), + eq(SkillHubMemberTable.orgMembershipId, input.currentMemberId), + ) + + return db + .select({ + id: SkillHubMemberTable.id, + skillHubId: SkillHubMemberTable.skillHubId, + orgMembershipId: SkillHubMemberTable.orgMembershipId, + teamId: SkillHubMemberTable.teamId, + createdAt: SkillHubMemberTable.createdAt, + }) + .from(SkillHubMemberTable) + .innerJoin(SkillHubTable, eq(SkillHubMemberTable.skillHubId, SkillHubTable.id)) + .where(accessWhere) +} + +async function listAccessibleSkillIds(input: { + organizationId: typeof SkillHubTable.$inferSelect.organizationId + currentMemberId: MemberId + memberTeams: Array<{ id: TeamId }> +}) { + const memberships = await listAccessibleHubMemberships(input) + const hubIds = [...new 
Set(memberships.map((membership) => membership.skillHubId))] + if (hubIds.length === 0) { + return new Set() + } + + const rows = await db + .select({ skillId: SkillHubSkillTable.skillId }) + .from(SkillHubSkillTable) + .where(inArray(SkillHubSkillTable.skillHubId, hubIds)) + + return new Set(rows.map((row) => row.skillId)) +} + +function canViewSkill(input: { + currentMemberId: MemberId + skill: SkillRow + accessibleSkillIds: Set +}) { + return input.skill.createdByOrgMembershipId === input.currentMemberId + || input.skill.shared !== null + || input.accessibleSkillIds.has(input.skill.id) +} + +export function registerOrgSkillRoutes }>(app: Hono) { + app.post( + "/v1/skills", + describeRoute({ + tags: ["Skills"], + summary: "Create skill", + description: "Creates a new skill in the organization from markdown content and optional sharing visibility.", + responses: { + 201: jsonResponse("Skill created successfully.", skillResponseSchema), + 400: jsonResponse("The skill creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create skills.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(createSkillSchema), + async (c) => { + const payload = c.get("organizationContext") + const input = c.req.valid("json") + const now = new Date() + const skillId = createDenTypeId("skill") + const metadata = parseSkillMetadata(input.skillText) + + await db.insert(SkillTable).values({ + id: skillId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + title: metadata.title, + description: metadata.description, + skillText: input.skillText, + shared: input.shared ?? 
null, + createdAt: now, + updatedAt: now, + }) + + return c.json({ + skill: { + id: skillId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + title: metadata.title, + description: metadata.description, + skillText: input.skillText, + shared: input.shared ?? null, + createdAt: now, + updatedAt: now, + }, + }, 201) + }, + ) + + app.get( + "/v1/skills", + describeRoute({ + tags: ["Skills"], + summary: "List skills", + description: "Lists the skills the current member can view, including owned skills, shared skills, and skills available through hub access.", + responses: { + 200: jsonResponse("Accessible skills returned successfully.", skillListResponseSchema), + 400: jsonResponse("The skill list path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list skills.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + resolveMemberTeamsMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const memberTeams = c.get("memberTeams") ?? 
[] + const accessibleSkillIds = await listAccessibleSkillIds({ + organizationId: payload.organization.id, + currentMemberId: payload.currentMember.id, + memberTeams, + }) + + const skills = await db + .select() + .from(SkillTable) + .where(eq(SkillTable.organizationId, payload.organization.id)) + .orderBy(desc(SkillTable.updatedAt)) + + return c.json({ + skills: skills + .filter((skill) => canViewSkill({ + currentMemberId: payload.currentMember.id, + skill, + accessibleSkillIds, + })) + .map((skill) => ({ + ...skill, + canManage: canManageSkill(payload, skill), + })), + }) + }, + ) + + app.delete( + "/v1/skills/:skillId", + describeRoute({ + tags: ["Skills"], + summary: "Delete skill", + description: "Deletes one organization skill when the caller is allowed to manage it.", + responses: { + 204: emptyResponse("Skill deleted successfully."), + 400: jsonResponse("The skill deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete skills.", unauthorizedSchema), + 403: jsonResponse("Only the skill creator or a workspace admin can delete skills.", forbiddenSchema), + 404: jsonResponse("The skill could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let skillId: SkillId + try { + skillId = parseSkillId(params.skillId) + } catch { + return c.json({ error: "skill_not_found" }, 404) + } + + const skillRows = await db + .select() + .from(SkillTable) + .where(and(eq(SkillTable.id, skillId), eq(SkillTable.organizationId, payload.organization.id))) + .limit(1) + + const skill = skillRows[0] + if (!skill) { + return c.json({ error: "skill_not_found" }, 404) + } + + if (!canManageSkill(payload, skill)) { + return c.json({ error: "forbidden", message: "Only the skill creator or a workspace admin can delete 
skills." }, 403) + } + + await db.transaction(async (tx) => { + await tx.delete(SkillHubSkillTable).where(eq(SkillHubSkillTable.skillId, skill.id)) + await tx.delete(SkillTable).where(eq(SkillTable.id, skill.id)) + }) + + return c.body(null, 204) + }, + ) + + app.patch( + "/v1/skills/:skillId", + describeRoute({ + tags: ["Skills"], + summary: "Update skill", + description: "Updates a skill's markdown content and-or sharing visibility while keeping derived metadata in sync.", + responses: { + 200: jsonResponse("Skill updated successfully.", skillResponseSchema), + 400: jsonResponse("The skill update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update skills.", unauthorizedSchema), + 403: jsonResponse("Only the skill creator or a workspace admin can update skills.", forbiddenSchema), + 404: jsonResponse("The skill could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(updateSkillSchema), + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let skillId: SkillId + try { + skillId = parseSkillId(params.skillId) + } catch { + return c.json({ error: "skill_not_found" }, 404) + } + + const skillRows = await db + .select() + .from(SkillTable) + .where(and(eq(SkillTable.id, skillId), eq(SkillTable.organizationId, payload.organization.id))) + .limit(1) + + const skill = skillRows[0] + if (!skill) { + return c.json({ error: "skill_not_found" }, 404) + } + + if (!canManageSkill(payload, skill)) { + return c.json({ error: "forbidden", message: "Only the skill creator or a workspace admin can update skills." }, 403) + } + + const nextSkillText = input.skillText ?? skill.skillText + const metadata = parseSkillMetadata(nextSkillText) + const updatedAt = new Date() + const nextShared = input.shared === undefined ? 
skill.shared : input.shared + + await db + .update(SkillTable) + .set({ + title: metadata.title, + description: metadata.description, + skillText: nextSkillText, + shared: nextShared, + updatedAt, + }) + .where(eq(SkillTable.id, skill.id)) + + return c.json({ + skill: { + ...skill, + title: metadata.title, + description: metadata.description, + skillText: nextSkillText, + shared: nextShared, + updatedAt, + }, + }) + }, + ) + + app.post( + "/v1/skill-hubs", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Create skill hub", + description: "Creates a skill hub that can group skills and assign access to specific members or teams.", + responses: { + 201: jsonResponse("Skill hub created successfully.", skillHubResponseSchema), + 400: jsonResponse("The skill hub creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create skill hubs.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + jsonValidator(createSkillHubSchema), + async (c) => { + const payload = c.get("organizationContext") + const input = c.req.valid("json") + const now = new Date() + const skillHubId = createDenTypeId("skillHub") + + await db.transaction(async (tx) => { + await tx.insert(SkillHubTable).values({ + id: skillHubId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + name: input.name, + description: input.description, + createdAt: now, + updatedAt: now, + }) + + await tx.insert(SkillHubMemberTable).values({ + id: createDenTypeId("skillHubMember"), + skillHubId, + orgMembershipId: payload.currentMember.id, + teamId: null, + createdAt: now, + }) + }) + + return c.json({ + skillHub: { + id: skillHubId, + organizationId: payload.organization.id, + createdByOrgMembershipId: payload.currentMember.id, + name: input.name, + description: input.description, + createdAt: now, + updatedAt: now, + }, + }, 201) + }, + ) + + app.get( + "/v1/skill-hubs", + 
describeRoute({ + tags: ["Skill Hubs"], + summary: "List skill hubs", + description: "Lists the skill hubs the current member can access, along with linked skills and access metadata.", + responses: { + 200: jsonResponse("Accessible skill hubs returned successfully.", skillHubListResponseSchema), + 400: jsonResponse("The skill hub list path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list skill hubs.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveOrganizationContextMiddleware, + resolveMemberTeamsMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const memberTeams = c.get("memberTeams") ?? [] + const accessibleMemberships = await listAccessibleHubMemberships({ + organizationId: payload.organization.id, + currentMemberId: payload.currentMember.id, + memberTeams, + }) + const skillHubIds = [...new Set(accessibleMemberships.map((membership) => membership.skillHubId))] + + if (skillHubIds.length === 0) { + return c.json({ skillHubs: [] }) + } + + const skillHubs = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.organizationId, payload.organization.id), inArray(SkillHubTable.id, skillHubIds))) + .orderBy(desc(SkillHubTable.updatedAt)) + + const skillLinks = await db + .select({ skillHubId: SkillHubSkillTable.skillHubId, skillId: SkillHubSkillTable.skillId }) + .from(SkillHubSkillTable) + .where(inArray(SkillHubSkillTable.skillHubId, skillHubIds)) + + const skillIds = [...new Set(skillLinks.map((link) => link.skillId))] + const skills = skillIds.length === 0 + ? 
[] + : await db + .select() + .from(SkillTable) + .where(and(eq(SkillTable.organizationId, payload.organization.id), inArray(SkillTable.id, skillIds))) + + const memberAccessRows = await db + .select({ + access: { + id: SkillHubMemberTable.id, + skillHubId: SkillHubMemberTable.skillHubId, + createdAt: SkillHubMemberTable.createdAt, + }, + member: { + id: MemberTable.id, + role: MemberTable.role, + }, + user: { + id: AuthUserTable.id, + name: AuthUserTable.name, + email: AuthUserTable.email, + image: AuthUserTable.image, + }, + }) + .from(SkillHubMemberTable) + .innerJoin(MemberTable, eq(SkillHubMemberTable.orgMembershipId, MemberTable.id)) + .innerJoin(AuthUserTable, eq(MemberTable.userId, AuthUserTable.id)) + .where(and(inArray(SkillHubMemberTable.skillHubId, skillHubIds), isNotNull(SkillHubMemberTable.orgMembershipId))) + + const teamAccessRows = await db + .select({ + access: { + id: SkillHubMemberTable.id, + skillHubId: SkillHubMemberTable.skillHubId, + createdAt: SkillHubMemberTable.createdAt, + }, + team: { + id: TeamTable.id, + name: TeamTable.name, + createdAt: TeamTable.createdAt, + updatedAt: TeamTable.updatedAt, + }, + }) + .from(SkillHubMemberTable) + .innerJoin(TeamTable, eq(SkillHubMemberTable.teamId, TeamTable.id)) + .where(and(inArray(SkillHubMemberTable.skillHubId, skillHubIds), isNotNull(SkillHubMemberTable.teamId))) + + const skillsById = new Map(skills.map((skill) => [skill.id, skill])) + const skillsByHubId = new Map() + for (const link of skillLinks) { + const skill = skillsById.get(link.skillId) + if (!skill) { + continue + } + + const existing = skillsByHubId.get(link.skillHubId) ?? [] + existing.push(skill) + skillsByHubId.set(link.skillHubId, existing) + } + + const memberAccessByHubId = new Map() + for (const row of memberAccessRows) { + const existing = memberAccessByHubId.get(row.access.skillHubId) ?? 
[] + existing.push(row) + memberAccessByHubId.set(row.access.skillHubId, existing) + } + + const teamAccessByHubId = new Map() + for (const row of teamAccessRows) { + const existing = teamAccessByHubId.get(row.access.skillHubId) ?? [] + existing.push(row) + teamAccessByHubId.set(row.access.skillHubId, existing) + } + + const accessibleViaByHubId = new Map() + for (const row of accessibleMemberships) { + const existing = accessibleViaByHubId.get(row.skillHubId) ?? { orgMembershipIds: [], teamIds: [] } + if (row.orgMembershipId && !existing.orgMembershipIds.includes(row.orgMembershipId)) { + existing.orgMembershipIds.push(row.orgMembershipId) + } + if (row.teamId && !existing.teamIds.includes(row.teamId)) { + existing.teamIds.push(row.teamId) + } + accessibleViaByHubId.set(row.skillHubId, existing) + } + + return c.json({ + skillHubs: skillHubs.map((skillHub) => ({ + ...skillHub, + canManage: canManageHub(payload, skillHub), + accessibleVia: accessibleViaByHubId.get(skillHub.id) ?? { orgMembershipIds: [], teamIds: [] }, + skills: skillsByHubId.get(skillHub.id) ?? [], + access: { + members: (memberAccessByHubId.get(skillHub.id) ?? []).map((row) => ({ + id: row.access.id, + orgMembershipId: row.member.id, + role: row.member.role, + user: row.user, + createdAt: row.access.createdAt, + })), + teams: (teamAccessByHubId.get(skillHub.id) ?? 
[]).map((row) => ({ + id: row.access.id, + teamId: row.team.id, + name: row.team.name, + createdAt: row.team.createdAt, + updatedAt: row.team.updatedAt, + })), + }, + })), + }) + }, + ) + + app.patch( + "/v1/skill-hubs/:skillHubId", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Update skill hub", + description: "Updates a skill hub's display name or description.", + responses: { + 200: jsonResponse("Skill hub updated successfully.", skillHubResponseSchema), + 400: jsonResponse("The skill hub update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update skill hubs.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can update skill hubs.", forbiddenSchema), + 404: jsonResponse("The skill hub could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillHubParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(updateSkillHubSchema), + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let skillHubId: SkillHubId + try { + skillHubId = parseSkillHubId(params.skillHubId) + } catch { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can update hubs." }, 403) + } + + const updatedAt = new Date() + const nextName = input.name ?? skillHub.name + const nextDescription = input.description === undefined ? 
skillHub.description : input.description + + await db + .update(SkillHubTable) + .set({ + name: nextName, + description: nextDescription, + updatedAt, + }) + .where(eq(SkillHubTable.id, skillHub.id)) + + return c.json({ + skillHub: { + ...skillHub, + name: nextName, + description: nextDescription, + updatedAt, + }, + }) + }, + ) + + app.delete( + "/v1/skill-hubs/:skillHubId", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Delete skill hub", + description: "Deletes a skill hub and removes its access links and skill links.", + responses: { + 204: emptyResponse("Skill hub deleted successfully."), + 400: jsonResponse("The skill hub deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete skill hubs.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can delete skill hubs.", forbiddenSchema), + 404: jsonResponse("The skill hub could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillHubParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let skillHubId: SkillHubId + try { + skillHubId = parseSkillHubId(params.skillHubId) + } catch { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can delete hubs." 
}, 403) + } + + await db.transaction(async (tx) => { + await tx.delete(SkillHubMemberTable).where(eq(SkillHubMemberTable.skillHubId, skillHub.id)) + await tx.delete(SkillHubSkillTable).where(eq(SkillHubSkillTable.skillHubId, skillHub.id)) + await tx.delete(SkillHubTable).where(eq(SkillHubTable.id, skillHub.id)) + }) + + return c.body(null, 204) + }, + ) + + app.post( + "/v1/skill-hubs/:skillHubId/skills", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Add skill to skill hub", + description: "Adds an existing organization skill to a skill hub so hub members can discover and use it.", + responses: { + 201: jsonResponse("Skill added to skill hub successfully.", successSchema), + 400: jsonResponse("The add-skill-to-hub request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage skill hub contents.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can manage hub skills, and private skills stay creator-controlled.", forbiddenSchema), + 404: jsonResponse("The skill hub or skill could not be found.", notFoundSchema), + 409: jsonResponse("The skill is already attached to the skill hub.", conflictSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillHubParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(addSkillToHubSchema), + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let skillHubId: SkillHubId + let skillId: SkillId + try { + skillHubId = parseSkillHubId(params.skillHubId) + skillId = parseSkillId(input.skillId) + } catch { + return c.json({ error: "not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: 
"skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can manage hub skills." }, 403) + } + + const skillRows = await db + .select() + .from(SkillTable) + .where(and(eq(SkillTable.id, skillId), eq(SkillTable.organizationId, payload.organization.id))) + .limit(1) + + const skill = skillRows[0] + if (!skill) { + return c.json({ error: "skill_not_found" }, 404) + } + + if (!canManageSkill(payload, skill) && skill.shared === null) { + return c.json({ + error: "forbidden", + message: "Private skills can only be added to hubs by their creator or a workspace admin.", + }, 403) + } + + const existing = await db + .select({ id: SkillHubSkillTable.id }) + .from(SkillHubSkillTable) + .where(and(eq(SkillHubSkillTable.skillHubId, skillHubId), eq(SkillHubSkillTable.skillId, skill.id))) + .limit(1) + + if (existing[0]) { + return c.json({ error: "skill_hub_skill_exists" }, 409) + } + + await db.insert(SkillHubSkillTable).values({ + id: createDenTypeId("skillHubSkill"), + skillHubId, + skillId: skill.id, + addedByOrgMembershipId: payload.currentMember.id, + createdAt: new Date(), + }) + + return c.json({ success: true }, 201) + }, + ) + + app.delete( + "/v1/skill-hubs/:skillHubId/skills/:skillId", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Remove skill from skill hub", + description: "Removes a skill from a skill hub without deleting the underlying skill itself.", + responses: { + 204: emptyResponse("Skill removed from skill hub successfully."), + 400: jsonResponse("The remove-skill-from-hub path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage skill hub contents.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can remove skills from a hub.", forbiddenSchema), + 404: jsonResponse("The skill hub or hub-skill link could not be found.", notFoundSchema), + }, + }), 
+ requireUserMiddleware, + paramValidator(orgSkillHubSkillParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let skillHubId: SkillHubId + let skillId: SkillId + try { + skillHubId = parseSkillHubId(params.skillHubId) + skillId = parseSkillId(params.skillId) + } catch { + return c.json({ error: "not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can manage hub skills." }, 403) + } + + const existing = await db + .select({ id: SkillHubSkillTable.id }) + .from(SkillHubSkillTable) + .where(and(eq(SkillHubSkillTable.skillHubId, skillHubId), eq(SkillHubSkillTable.skillId, skillId))) + .limit(1) + + if (!existing[0]) { + return c.json({ error: "skill_hub_skill_not_found" }, 404) + } + + await db + .delete(SkillHubSkillTable) + .where(and(eq(SkillHubSkillTable.skillHubId, skillHubId), eq(SkillHubSkillTable.skillId, skillId))) + + return c.body(null, 204) + }, + ) + + app.post( + "/v1/skill-hubs/:skillHubId/access", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Grant skill hub access", + description: "Grants a specific member or team access to a skill hub.", + responses: { + 201: jsonResponse("Skill hub access granted successfully.", skillHubAccessResponseSchema), + 400: jsonResponse("The skill hub access request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage skill hub access.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can grant hub access.", forbiddenSchema), + 
404: jsonResponse("The skill hub or access target could not be found.", notFoundSchema), + 409: jsonResponse("The requested access entry already exists.", conflictSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillHubParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(addSkillHubAccessSchema), + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let skillHubId: SkillHubId + let orgMembershipId: MemberId | null = null + let teamId: TeamId | null = null + try { + skillHubId = parseSkillHubId(params.skillHubId) + orgMembershipId = input.orgMembershipId ? parseMemberId(input.orgMembershipId) : null + teamId = input.teamId ? parseTeamId(input.teamId) : null + } catch { + return c.json({ error: "access_target_not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can manage access." 
}, 403) + } + + if (orgMembershipId) { + const memberRows = await db + .select({ id: MemberTable.id }) + .from(MemberTable) + .where(and(eq(MemberTable.id, orgMembershipId), eq(MemberTable.organizationId, payload.organization.id))) + .limit(1) + + if (!memberRows[0]) { + return c.json({ error: "member_not_found" }, 404) + } + } + + if (teamId) { + const teamRows = await db + .select({ id: TeamTable.id }) + .from(TeamTable) + .where(and(eq(TeamTable.id, teamId), eq(TeamTable.organizationId, payload.organization.id))) + .limit(1) + + if (!teamRows[0]) { + return c.json({ error: "team_not_found" }, 404) + } + } + + const existing = await db + .select({ id: SkillHubMemberTable.id }) + .from(SkillHubMemberTable) + .where( + orgMembershipId + ? and(eq(SkillHubMemberTable.skillHubId, skillHubId), eq(SkillHubMemberTable.orgMembershipId, orgMembershipId)) + : and(eq(SkillHubMemberTable.skillHubId, skillHubId), eq(SkillHubMemberTable.teamId, teamId as TeamId)), + ) + .limit(1) + + if (existing[0]) { + return c.json({ error: "skill_hub_access_exists" }, 409) + } + + const accessId = createDenTypeId("skillHubMember") + const createdAt = new Date() + + await db.insert(SkillHubMemberTable).values({ + id: accessId, + skillHubId, + orgMembershipId, + teamId, + createdAt, + }) + + return c.json({ + access: { + id: accessId, + skillHubId, + orgMembershipId, + teamId, + createdAt, + }, + }, 201) + }, + ) + + app.delete( + "/v1/skill-hubs/:skillHubId/access/:accessId", + describeRoute({ + tags: ["Skill Hubs"], + summary: "Revoke skill hub access", + description: "Revokes one member or team access entry from a skill hub.", + responses: { + 204: emptyResponse("Skill hub access removed successfully."), + 400: jsonResponse("The skill hub access deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to manage skill hub access.", unauthorizedSchema), + 403: jsonResponse("Only the hub creator or a workspace admin can revoke hub 
access.", forbiddenSchema), + 404: jsonResponse("The skill hub or access entry could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgSkillHubAccessParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let skillHubId: SkillHubId + let accessId: SkillHubMemberId + try { + skillHubId = parseSkillHubId(params.skillHubId) + accessId = parseSkillHubMemberId(params.accessId) + } catch { + return c.json({ error: "not_found" }, 404) + } + + const skillHubRows = await db + .select() + .from(SkillHubTable) + .where(and(eq(SkillHubTable.id, skillHubId), eq(SkillHubTable.organizationId, payload.organization.id))) + .limit(1) + + const skillHub = skillHubRows[0] + if (!skillHub) { + return c.json({ error: "skill_hub_not_found" }, 404) + } + + if (!canManageHub(payload, skillHub)) { + return c.json({ error: "forbidden", message: "Only the hub creator or a workspace admin can manage access." 
}, 403) + } + + const accessRows = await db + .select() + .from(SkillHubMemberTable) + .where(and(eq(SkillHubMemberTable.id, accessId), eq(SkillHubMemberTable.skillHubId, skillHubId))) + .limit(1) + + const access = accessRows[0] + if (!access) { + return c.json({ error: "skill_hub_access_not_found" }, 404) + } + + await db.delete(SkillHubMemberTable).where(eq(SkillHubMemberTable.id, access.id)) + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/org/teams.ts b/ee/apps/den-api/src/routes/org/teams.ts new file mode 100644 index 0000000000..2dd34454fb --- /dev/null +++ b/ee/apps/den-api/src/routes/org/teams.ts @@ -0,0 +1,331 @@ +import { and, eq } from "@openwork-ee/den-db/drizzle" +import { + MemberTable, + SkillHubMemberTable, + TeamMemberTable, + TeamTable, +} from "@openwork-ee/den-db/schema" +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { + jsonValidator, + paramValidator, + requireUserMiddleware, + resolveOrganizationContextMiddleware, +} from "../../middleware/index.js" +import { denTypeIdSchema, emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import type { OrgRouteVariables } from "./shared.js" +import { + ensureTeamManager, + idParamSchema, +} from "./shared.js" + +const createTeamSchema = z.object({ + name: z.string().trim().min(1).max(255), + memberIds: z.array(denTypeIdSchema("member")).optional().default([]), +}) + +const updateTeamSchema = z.object({ + name: z.string().trim().min(1).max(255).optional(), + memberIds: z.array(denTypeIdSchema("member")).optional(), +}).superRefine((value, ctx) => { + if (value.name === undefined && value.memberIds === undefined) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["name"], + message: "Provide at least one field to 
update.", + }) + } +}) + +type TeamId = typeof TeamTable.$inferSelect.id +type MemberId = typeof MemberTable.$inferSelect.id + +const orgTeamParamsSchema = idParamSchema("teamId", "team") + +const teamResponseSchema = z.object({ + team: z.object({ + id: denTypeIdSchema("team"), + organizationId: denTypeIdSchema("organization"), + name: z.string(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), + memberIds: z.array(denTypeIdSchema("member")), + }), +}).meta({ ref: "TeamResponse" }) + +function parseTeamId(value: string) { + return normalizeDenTypeId("team", value) +} + +function parseMemberIds(memberIds: string[]) { + return [...new Set(memberIds.map((value) => normalizeDenTypeId("member", value)))] +} + +async function ensureMembersBelongToOrganization(input: { + organizationId: typeof TeamTable.$inferSelect.organizationId + memberIds: MemberId[] +}) { + if (input.memberIds.length === 0) { + return true + } + + const rows = await db + .select({ id: MemberTable.id }) + .from(MemberTable) + .where(eq(MemberTable.organizationId, input.organizationId)) + + const memberIds = new Set(rows.map((row) => row.id)) + return input.memberIds.every((memberId) => memberIds.has(memberId)) +} + +export function registerOrgTeamRoutes(app: Hono) { + app.post( + "/v1/teams", + describeRoute({ + tags: ["Teams"], + summary: "Create team", + description: "Creates a team inside an organization and can optionally attach existing organization members to it.", + responses: { + 201: jsonResponse("Team created successfully.", teamResponseSchema), + 400: jsonResponse("The team creation request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to create teams.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners and admins can create teams.", forbiddenSchema), + 404: jsonResponse("The organization or a referenced member could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + 
resolveOrganizationContextMiddleware, + jsonValidator(createTeamSchema), + async (c) => { + const permission = ensureTeamManager(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const input = c.req.valid("json") + + let memberIds: MemberId[] + try { + memberIds = parseMemberIds(input.memberIds) + } catch { + return c.json({ error: "member_not_found" }, 404) + } + + const membersBelongToOrg = await ensureMembersBelongToOrganization({ + organizationId: payload.organization.id, + memberIds, + }) + if (!membersBelongToOrg) { + return c.json({ error: "member_not_found" }, 404) + } + + const existingTeam = await db + .select({ id: TeamTable.id }) + .from(TeamTable) + .where(and(eq(TeamTable.organizationId, payload.organization.id), eq(TeamTable.name, input.name))) + .limit(1) + + if (existingTeam[0]) { + return c.json({ error: "team_exists", message: "That team already exists in this organization." }, 409) + } + + const teamId = createDenTypeId("team") + const now = new Date() + + await db.transaction(async (tx) => { + await tx.insert(TeamTable).values({ + id: teamId, + name: input.name, + organizationId: payload.organization.id, + createdAt: now, + updatedAt: now, + }) + + if (memberIds.length > 0) { + await tx.insert(TeamMemberTable).values( + memberIds.map((memberId) => ({ + id: createDenTypeId("teamMember"), + teamId, + orgMembershipId: memberId, + createdAt: now, + })), + ) + } + }) + + return c.json({ + team: { + id: teamId, + organizationId: payload.organization.id, + name: input.name, + createdAt: now, + updatedAt: now, + memberIds, + }, + }, 201) + }, + ) + + app.patch( + "/v1/teams/:teamId", + describeRoute({ + tags: ["Teams"], + summary: "Update team", + description: "Updates a team's name and-or membership list within an organization.", + responses: { + 200: jsonResponse("Team updated successfully.", teamResponseSchema), + 400: jsonResponse("The team update request was invalid.", 
invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update teams.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners and admins can update teams.", forbiddenSchema), + 404: jsonResponse("The team, organization, or a referenced member could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgTeamParamsSchema), + resolveOrganizationContextMiddleware, + jsonValidator(updateTeamSchema), + async (c) => { + const permission = ensureTeamManager(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const params = c.req.valid("param") + const input = c.req.valid("json") + + let teamId: TeamId + try { + teamId = parseTeamId(params.teamId) + } catch { + return c.json({ error: "team_not_found" }, 404) + } + + const teamRows = await db + .select() + .from(TeamTable) + .where(and(eq(TeamTable.id, teamId), eq(TeamTable.organizationId, payload.organization.id))) + .limit(1) + + const team = teamRows[0] + if (!team) { + return c.json({ error: "team_not_found" }, 404) + } + + let memberIds: MemberId[] | undefined + if (input.memberIds) { + try { + memberIds = parseMemberIds(input.memberIds) + } catch { + return c.json({ error: "member_not_found" }, 404) + } + + const membersBelongToOrg = await ensureMembersBelongToOrganization({ + organizationId: payload.organization.id, + memberIds, + }) + if (!membersBelongToOrg) { + return c.json({ error: "member_not_found" }, 404) + } + } + + const nextName = input.name ?? team.name + const duplicate = await db + .select({ id: TeamTable.id }) + .from(TeamTable) + .where(and(eq(TeamTable.organizationId, payload.organization.id), eq(TeamTable.name, nextName))) + .limit(1) + + if (duplicate[0] && duplicate[0].id !== team.id) { + return c.json({ error: "team_exists", message: "That team already exists in this organization." 
}, 409) + } + + const updatedAt = new Date() + await db.transaction(async (tx) => { + await tx.update(TeamTable).set({ name: nextName, updatedAt }).where(eq(TeamTable.id, team.id)) + + if (memberIds) { + await tx.delete(TeamMemberTable).where(eq(TeamMemberTable.teamId, team.id)) + if (memberIds.length > 0) { + await tx.insert(TeamMemberTable).values( + memberIds.map((memberId) => ({ + id: createDenTypeId("teamMember"), + teamId: team.id, + orgMembershipId: memberId, + createdAt: updatedAt, + })), + ) + } + } + }) + + return c.json({ + team: { + ...team, + name: nextName, + updatedAt, + memberIds: memberIds ?? [], + }, + }) + }, + ) + + app.delete( + "/v1/teams/:teamId", + describeRoute({ + tags: ["Teams"], + summary: "Delete team", + description: "Deletes a team and removes its related hub-access and team-membership records.", + responses: { + 204: emptyResponse("Team deleted successfully."), + 400: jsonResponse("The team deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete teams.", unauthorizedSchema), + 403: jsonResponse("Only workspace owners and admins can delete teams.", forbiddenSchema), + 404: jsonResponse("The team or organization could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + paramValidator(orgTeamParamsSchema), + resolveOrganizationContextMiddleware, + async (c) => { + const permission = ensureTeamManager(c) + if (!permission.ok) { + return c.json(permission.response, 403) + } + + const payload = c.get("organizationContext") + const params = c.req.valid("param") + + let teamId: TeamId + try { + teamId = parseTeamId(params.teamId) + } catch { + return c.json({ error: "team_not_found" }, 404) + } + + const teamRows = await db + .select() + .from(TeamTable) + .where(and(eq(TeamTable.id, teamId), eq(TeamTable.organizationId, payload.organization.id))) + .limit(1) + + const team = teamRows[0] + if (!team) { + return c.json({ error: "team_not_found" }, 404) + } 
+ + await db.transaction(async (tx) => { + await tx.delete(SkillHubMemberTable).where(eq(SkillHubMemberTable.teamId, team.id)) + await tx.delete(TeamMemberTable).where(eq(TeamMemberTable.teamId, team.id)) + await tx.delete(TeamTable).where(eq(TeamTable.id, team.id)) + }) + + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/version/README.md b/ee/apps/den-api/src/routes/version/README.md new file mode 100644 index 0000000000..b2ddf7ca86 --- /dev/null +++ b/ee/apps/den-api/src/routes/version/README.md @@ -0,0 +1,3 @@ +# Version Routes + +This folder contains public Den API routes that expose app version metadata for desktop clients. diff --git a/ee/apps/den-api/src/routes/version/index.ts b/ee/apps/den-api/src/routes/version/index.ts new file mode 100644 index 0000000000..9709889fc3 --- /dev/null +++ b/ee/apps/den-api/src/routes/version/index.ts @@ -0,0 +1,27 @@ +import type { Env, Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { jsonResponse } from "../../openapi.js" +import { denApiAppVersion } from "../../version.js" + +const appVersionResponseSchema = z.object({ + minAppVersion: z.string(), + latestAppVersion: z.string().min(1), +}).meta({ ref: "DenAppVersionResponse" }) + +export function registerVersionRoutes(app: Hono) { + app.get( + "/v1/app-version", + describeRoute({ + tags: ["System"], + summary: "Get desktop app version metadata", + description: "Returns the minimum supported desktop app version and the latest desktop app version published with this Den API build.", + responses: { + 200: jsonResponse("Desktop app version metadata returned successfully.", appVersionResponseSchema), + }, + }), + (c) => { + return c.json(denApiAppVersion) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/webhooks/github.ts b/ee/apps/den-api/src/routes/webhooks/github.ts new file mode 100644 index 0000000000..13d5d22906 --- /dev/null +++ b/ee/apps/den-api/src/routes/webhooks/github.ts @@ -0,0 
+1,100 @@ +import { createHmac, timingSafeEqual } from "node:crypto" +import type { Env, Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { env } from "../../env.js" +import { emptyResponse, jsonResponse } from "../../openapi.js" +import { enqueueGithubWebhookSync } from "../org/plugin-system/store.js" +import { + githubWebhookAcceptedResponseSchema, + githubWebhookIgnoredResponseSchema, + githubWebhookUnauthorizedResponseSchema, +} from "../org/plugin-system/schemas.js" +import { pluginArchRoutePaths } from "../org/plugin-system/contracts.js" + +export function signGithubBody(rawBody: string, secret: string) { + return `sha256=${createHmac("sha256", secret).update(rawBody).digest("hex")}` +} + +export function safeCompareGithubSignature(received: string, expected: string) { + const encoder = new TextEncoder() + const receivedBuffer = encoder.encode(received) + const expectedBuffer = encoder.encode(expected) + if (receivedBuffer.length !== expectedBuffer.length) { + return false + } + return timingSafeEqual(receivedBuffer, expectedBuffer) +} + +export function registerGithubWebhookRoutes(app: Hono) { + app.post( + pluginArchRoutePaths.githubWebhookIngress, + describeRoute({ + tags: ["Webhooks"], + summary: "GitHub webhook ingress", + description: "Verifies a GitHub App webhook signature against the raw request body, then records any relevant sync work.", + responses: { + 200: jsonResponse("Ignored but valid GitHub webhook delivery.", githubWebhookIgnoredResponseSchema), + 202: jsonResponse("Accepted GitHub webhook delivery.", githubWebhookAcceptedResponseSchema), + 401: jsonResponse("Invalid GitHub webhook signature.", githubWebhookUnauthorizedResponseSchema), + 503: emptyResponse("GitHub webhook secret is not configured."), + }, + }), + async (c) => { + const secret = env.githubConnectorApp.webhookSecret + if (!secret) { + return c.body(null, 503) + } + + const rawBody = await c.req.raw.text() + const signature = 
c.req.raw.headers.get("x-hub-signature-256")?.trim() ?? "" + if (!signature) { + return c.json({ ok: false, error: "invalid signature" }, 401) + } + + const expected = signGithubBody(rawBody, secret) + if (!safeCompareGithubSignature(signature, expected)) { + return c.json({ ok: false, error: "invalid signature" }, 401) + } + + const event = c.req.raw.headers.get("x-github-event")?.trim() ?? "" + const deliveryId = c.req.raw.headers.get("x-github-delivery")?.trim() ?? "" + if (!event || !deliveryId) { + return c.json({ ok: true, accepted: false, reason: "event ignored" }, 200) + } + + const normalizedEvent = event === "push" || event === "installation" || event === "installation_repositories" || event === "repository" + ? event + : null + if (!normalizedEvent) { + return c.json({ ok: true, accepted: false, reason: "event ignored" }, 200) + } + + const payload = JSON.parse(rawBody) as Record + const installationId = payload.installation && typeof payload.installation === "object" && typeof (payload.installation as Record).id === "number" + ? (payload.installation as Record).id as number + : undefined + const repository = payload.repository && typeof payload.repository === "object" ? payload.repository as Record : null + const repositoryFullName = typeof repository?.full_name === "string" ? repository.full_name : undefined + const repositoryId = typeof repository?.id === "number" ? repository.id : undefined + const ref = typeof payload.ref === "string" ? payload.ref : undefined + const headSha = typeof payload.after === "string" ? 
payload.after : undefined + + const accepted = await enqueueGithubWebhookSync({ + deliveryId, + event: normalizedEvent, + headSha, + installationId, + payload, + ref, + repositoryFullName, + repositoryId, + }) + + if (!accepted.accepted) { + return c.json({ ok: true, accepted: false, reason: accepted.reason }, 200) + } + + return c.json({ ok: true, accepted: true, deliveryId, event: normalizedEvent, queued: accepted.queued }, 202) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/webhooks/index.ts b/ee/apps/den-api/src/routes/webhooks/index.ts new file mode 100644 index 0000000000..ac3643fb32 --- /dev/null +++ b/ee/apps/den-api/src/routes/webhooks/index.ts @@ -0,0 +1,6 @@ +import type { Env, Hono } from "hono" +import { registerGithubWebhookRoutes } from "./github.js" + +export function registerWebhookRoutes(app: Hono) { + registerGithubWebhookRoutes(app) +} diff --git a/ee/apps/den-api/src/routes/workers/README.md b/ee/apps/den-api/src/routes/workers/README.md new file mode 100644 index 0000000000..55c5043621 --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/README.md @@ -0,0 +1,24 @@ +# Worker Routes + +This folder owns worker lifecycle, runtime, billing, and heartbeat routes. 
+ +## Files + +- `index.ts`: registers all worker route groups +- `activity.ts`: unauthenticated worker heartbeat endpoint authenticated by worker activity token +- `billing.ts`: user-facing cloud worker billing endpoints +- `core.ts`: list/create/get/update/delete worker routes and token lookup +- `runtime.ts`: worker runtime inspection and upgrade passthrough endpoints +- `shared.ts`: worker schemas, helper functions, response mapping, and shared DB/runtime utilities + +## Middleware expectations + +- Most worker routes use `requireUserMiddleware` +- Org-scoped worker routes should use `resolveUserOrganizationsMiddleware` to determine the current active org +- Request payloads, params, and query flags should use Hono Zod validators from `src/middleware/index.ts` + +## Notes + +- Activity heartbeat is the exception: it uses worker tokens instead of user auth +- Runtime endpoints proxy to the worker runtime using stored host tokens and instance URLs +- Provisioning logic lives in `src/workers/`, not in the route handlers themselves diff --git a/ee/apps/den-api/src/routes/workers/activity.ts b/ee/apps/den-api/src/routes/workers/activity.ts new file mode 100644 index 0000000000..fe59220f62 --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/activity.ts @@ -0,0 +1,118 @@ +import { and, eq, isNull } from "@openwork-ee/den-db/drizzle" +import { WorkerTable, WorkerTokenTable } from "@openwork-ee/den-db/schema" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { jsonValidator, paramValidator } from "../../middleware/index.js" +import { invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import { + activityHeartbeatSchema, + newerDate, + parseHeartbeatTimestamp, + parseWorkerIdParam, + readBearerToken, + workerIdParamSchema, + type WorkerRouteVariables, +} from "./shared.js" + +const workerHeartbeatResponseSchema = z.object({ + 
ok: z.literal(true), + workerId: z.string(), + isActiveRecently: z.boolean(), + openSessionCount: z.number().int().nullable(), + lastHeartbeatAt: z.string().datetime(), + lastActiveAt: z.string().datetime().nullable(), +}).meta({ ref: "WorkerHeartbeatResponse" }) + +export function registerWorkerActivityRoutes(app: Hono) { + app.post( + "/v1/workers/:id/activity-heartbeat", + describeRoute({ + tags: ["Workers", "Worker Activity"], + summary: "Record worker heartbeat", + description: "Accepts signed heartbeat and recent-activity updates from a worker so Den can track worker health and recent usage.", + responses: { + 200: jsonResponse("Worker heartbeat accepted successfully.", workerHeartbeatResponseSchema), + 400: jsonResponse("The heartbeat payload or worker path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The worker heartbeat token was missing or invalid.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + }, + }), + paramValidator(workerIdParamSchema), + jsonValidator(activityHeartbeatSchema), + async (c) => { + const params = c.req.valid("param") + const body = c.req.valid("json") + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const authorization = + readBearerToken(c.req.header("authorization") ?? undefined) ?? 
+ (c.req.header("x-den-worker-heartbeat-token")?.trim() || null) + + if (!authorization) { + return c.json({ error: "unauthorized" }, 401) + } + + const tokenRows = await db + .select({ id: WorkerTokenTable.id }) + .from(WorkerTokenTable) + .where( + and( + eq(WorkerTokenTable.worker_id, workerId), + eq(WorkerTokenTable.scope, "activity"), + eq(WorkerTokenTable.token, authorization), + isNull(WorkerTokenTable.revoked_at), + ), + ) + .limit(1) + + if (tokenRows.length === 0) { + return c.json({ error: "unauthorized" }, 401) + } + + const workerRows = await db + .select() + .from(WorkerTable) + .where(eq(WorkerTable.id, workerId)) + .limit(1) + + const worker = workerRows[0] + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + const heartbeatAt = parseHeartbeatTimestamp(body.sentAt) ?? new Date() + const requestedActivityAt = parseHeartbeatTimestamp(body.lastActivityAt ?? null) + const activityAt = body.isActiveRecently ? (requestedActivityAt ?? heartbeatAt) : null + + const nextHeartbeatAt = newerDate(worker.last_heartbeat_at, heartbeatAt) + const nextActiveAt = body.isActiveRecently + ? newerDate(worker.last_active_at, activityAt) + : worker.last_active_at + + await db + .update(WorkerTable) + .set({ + last_heartbeat_at: nextHeartbeatAt, + last_active_at: nextActiveAt, + }) + .where(eq(WorkerTable.id, workerId)) + + return c.json({ + ok: true, + workerId, + isActiveRecently: body.isActiveRecently, + openSessionCount: body.openSessionCount ?? 
null, + lastHeartbeatAt: nextHeartbeatAt, + lastActiveAt: nextActiveAt, + }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/workers/billing.ts b/ee/apps/den-api/src/routes/workers/billing.ts new file mode 100644 index 0000000000..a63e32e71b --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/billing.ts @@ -0,0 +1,126 @@ +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { env } from "../../env.js" +import { jsonValidator, queryValidator, requireUserMiddleware } from "../../middleware/index.js" +import { invalidRequestSchema, jsonResponse, unauthorizedSchema } from "../../openapi.js" +import { getRequiredUserEmail } from "../../user.js" +import type { WorkerRouteVariables } from "./shared.js" +import { billingQuerySchema, billingSubscriptionSchema, getWorkerBilling, setWorkerBillingSubscription, queryIncludesFlag } from "./shared.js" + +const workerBillingPayloadSchema = z.object({ + status: z.string(), + featureGateEnabled: z.boolean(), + productId: z.string().nullable().optional(), + benefitId: z.string().nullable().optional(), +}).passthrough() + +const workerBillingResponseSchema = z.object({ + billing: workerBillingPayloadSchema, +}).meta({ ref: "WorkerBillingResponse" }) + +const workerBillingSubscriptionResponseSchema = z.object({ + subscription: z.object({}).passthrough(), + billing: workerBillingPayloadSchema, +}).meta({ ref: "WorkerBillingSubscriptionResponse" }) + +const userEmailRequiredSchema = z.object({ + error: z.literal("user_email_required"), +}).meta({ ref: "UserEmailRequiredError" }) + +export function registerWorkerBillingRoutes(app: Hono) { + app.get( + "/v1/workers/billing", + describeRoute({ + tags: ["Workers"], + hide: true, + summary: "Get worker billing status", + description: "Returns billing and subscription status for the signed-in user's cloud worker access.", + responses: { + 200: jsonResponse("Worker billing status returned successfully.", 
workerBillingResponseSchema), + 400: jsonResponse("The billing query parameters were invalid or the user is missing an email.", z.union([invalidRequestSchema, userEmailRequiredSchema])), + 401: jsonResponse("The caller must be signed in to read billing status.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + queryValidator(billingQuerySchema), + async (c) => { + const user = c.get("user") + const query = c.req.valid("query") + const email = getRequiredUserEmail(user) + + if (!email) { + return c.json({ error: "user_email_required" }, 400) + } + + const billing = await getWorkerBilling({ + userId: user.id, + email, + name: user.name ?? user.email ?? "OpenWork User", + includeCheckoutUrl: queryIncludesFlag(query.includeCheckout), + includePortalUrl: !queryIncludesFlag(query.excludePortal), + includeInvoices: !queryIncludesFlag(query.excludeInvoices), + }) + + return c.json({ + billing: { + ...billing, + productId: env.polar.productId, + benefitId: env.polar.benefitId, + }, + }) + }, + ) + + app.post( + "/v1/workers/billing/subscription", + describeRoute({ + tags: ["Workers"], + hide: true, + summary: "Update worker subscription settings", + description: "Updates whether the user's cloud worker subscription should cancel at the end of the current billing period.", + responses: { + 200: jsonResponse("Worker subscription settings updated successfully.", workerBillingSubscriptionResponseSchema), + 400: jsonResponse("The subscription update payload was invalid or the user is missing an email.", z.union([invalidRequestSchema, userEmailRequiredSchema])), + 401: jsonResponse("The caller must be signed in to update billing settings.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + jsonValidator(billingSubscriptionSchema), + async (c) => { + const user = c.get("user") + const input = c.req.valid("json") + const email = getRequiredUserEmail(user) + + if (!email) { + return c.json({ error: "user_email_required" }, 400) + } + + const billingInput = { + 
userId: user.id, + email, + name: user.name ?? user.email ?? "OpenWork User", + } + + const subscription = await setWorkerBillingSubscription({ + ...billingInput, + cancelAtPeriodEnd: input.cancelAtPeriodEnd, + }) + const billing = await getWorkerBilling({ + ...billingInput, + includeCheckoutUrl: false, + includePortalUrl: true, + includeInvoices: true, + }) + + return c.json({ + subscription, + billing: { + ...billing, + productId: env.polar.productId, + benefitId: env.polar.benefitId, + }, + }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/workers/core.ts b/ee/apps/den-api/src/routes/workers/core.ts new file mode 100644 index 0000000000..fcd7c408c2 --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/core.ts @@ -0,0 +1,485 @@ +import { desc, eq } from "@openwork-ee/den-db/drizzle" +import { WorkerTable, WorkerTokenTable } from "@openwork-ee/den-db/schema" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { db } from "../../db.js" +import { jsonValidator, paramValidator, queryValidator, requireUserMiddleware, resolveUserOrganizationsMiddleware } from "../../middleware/index.js" +import { denTypeIdSchema, emptyResponse, forbiddenSchema, invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import { getOrganizationLimitStatus } from "../../organization-limits.js" +import type { WorkerRouteVariables } from "./shared.js" +import { + continueCloudProvisioning, + createWorkerSchema, + deleteWorkerCascade, + getLatestWorkerInstance, + getWorkerByIdForOrg, + getWorkerTokensAndConnect, + listWorkersQuerySchema, + parseWorkerIdParam, + toInstanceResponse, + toWorkerResponse, + token, + updateWorkerSchema, + workerIdParamSchema, +} from "./shared.js" + +const workerInstanceSchema = z.object({ + provider: z.string(), + region: z.string().nullable(), + url: z.string().nullable(), + status: z.string(), + 
createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), +}).nullable().meta({ ref: "WorkerInstance" }) + +const workerSchema = z.object({ + id: denTypeIdSchema("worker"), + orgId: denTypeIdSchema("organization"), + createdByUserId: denTypeIdSchema("user").nullable(), + isMine: z.boolean(), + name: z.string(), + description: z.string().nullable(), + destination: z.string(), + status: z.string(), + imageVersion: z.string().nullable(), + workspacePath: z.string().nullable(), + sandboxBackend: z.string().nullable(), + lastHeartbeatAt: z.string().datetime().nullable(), + lastActiveAt: z.string().datetime().nullable(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), +}).meta({ ref: "Worker" }) + +const workerListResponseSchema = z.object({ + workers: z.array(z.object({ + instance: workerInstanceSchema, + }).merge(workerSchema)), +}).meta({ ref: "WorkerListResponse" }) + +const workerResponseSchema = z.object({ + worker: workerSchema, + instance: workerInstanceSchema, +}).meta({ ref: "WorkerResponse" }) + +const workerCreateResponseSchema = z.object({ + worker: workerSchema, + tokens: z.object({ + owner: z.string(), + host: z.string(), + client: z.string(), + }), + instance: workerInstanceSchema, + launch: z.object({ + mode: z.string(), + pollAfterMs: z.number().int(), + }), +}).meta({ ref: "WorkerCreateResponse" }) + +const workerTokensResponseSchema = z.object({ + tokens: z.object({ + owner: z.string(), + host: z.string(), + client: z.string(), + }), + connect: z.object({ + openworkUrl: z.string().nullable(), + workspaceId: z.string().nullable(), + }).nullable(), +}).meta({ ref: "WorkerTokensResponse" }) + +const organizationUnavailableSchema = z.object({ + error: z.literal("organization_unavailable"), +}).meta({ ref: "OrganizationUnavailableError" }) + +const workspacePathRequiredSchema = z.object({ + error: z.literal("workspace_path_required"), +}).meta({ ref: "WorkspacePathRequiredError" }) + +const orgLimitReachedSchema = 
z.object({ + error: z.literal("org_limit_reached"), + limitType: z.literal("workers"), + limit: z.number().int(), + currentCount: z.number().int(), + message: z.string(), +}).meta({ ref: "WorkerOrgLimitReachedError" }) + +const workerRuntimeUnavailableSchema = z.object({ + error: z.literal("worker_tokens_unavailable"), + message: z.string(), +}).or(z.object({ + error: z.literal("worker_runtime_unavailable"), + message: z.string(), +})).meta({ ref: "WorkerConnectionError" }) + +export function registerWorkerCoreRoutes(app: Hono) { + app.get( + "/v1/workers", + describeRoute({ + tags: ["Workers"], + summary: "List workers", + description: "Lists the workers that belong to the caller's active organization, including each worker's latest known instance state.", + responses: { + 200: jsonResponse("Workers returned successfully.", workerListResponseSchema), + 400: jsonResponse("The worker list query parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to list workers.", unauthorizedSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + queryValidator(listWorkersQuerySchema), + async (c) => { + const user = c.get("user") + const orgId = c.get("activeOrganizationId") + const query = c.req.valid("query") + + if (!orgId) { + return c.json({ workers: [] }) + } + + const rows = await db + .select() + .from(WorkerTable) + .where(eq(WorkerTable.org_id, orgId)) + .orderBy(desc(WorkerTable.created_at)) + .limit(query.limit) + + const workers = await Promise.all( + rows.map(async (row) => { + const instance = await getLatestWorkerInstance(row.id) + return { + ...toWorkerResponse(row, user.id), + instance: toInstanceResponse(instance), + } + }), + ) + + return c.json({ workers }) + }, + ) + + app.post( + "/v1/workers", + describeRoute({ + tags: ["Workers"], + summary: "Create worker", + description: "Creates a local or cloud worker for the active organization and returns the initial tokens needed to connect 
to it.", + responses: { + 201: jsonResponse("Local worker created successfully.", workerCreateResponseSchema), + 202: jsonResponse("Cloud worker creation started successfully.", workerCreateResponseSchema), + 400: jsonResponse("The worker creation payload was invalid.", z.union([invalidRequestSchema, organizationUnavailableSchema, workspacePathRequiredSchema])), + 401: jsonResponse("The caller must be signed in to create workers.", unauthorizedSchema), + 409: jsonResponse("The organization has reached its worker limit.", orgLimitReachedSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + jsonValidator(createWorkerSchema), + async (c) => { + const user = c.get("user") + const orgId = c.get("activeOrganizationId") + const input = c.req.valid("json") + + if (!orgId) { + return c.json({ error: "organization_unavailable" }, 400) + } + + if (input.destination === "local" && !input.workspacePath) { + return c.json({ error: "workspace_path_required" }, 400) + } + + if (input.destination === "cloud") { + const workerLimit = await getOrganizationLimitStatus(orgId, "workers") + if (workerLimit.exceeded) { + return c.json({ + error: "org_limit_reached", + limitType: "workers", + limit: workerLimit.limit, + currentCount: workerLimit.currentCount, + message: `This workspace currently supports up to ${workerLimit.limit} workers. Contact support to increase the limit.`, + }, 409) + } + } + + const workerId = createDenTypeId("worker") + const workerStatus = input.destination === "cloud" ? 
"provisioning" : "healthy" + + await db.insert(WorkerTable).values({ + id: workerId, + org_id: orgId, + created_by_user_id: user.id, + name: input.name, + description: input.description, + destination: input.destination, + status: workerStatus, + image_version: input.imageVersion, + workspace_path: input.workspacePath, + sandbox_backend: input.sandboxBackend, + }) + + const hostToken = token() + const clientToken = token() + const activityToken = token() + await db.insert(WorkerTokenTable).values([ + { + id: createDenTypeId("workerToken"), + worker_id: workerId, + scope: "host", + token: hostToken, + }, + { + id: createDenTypeId("workerToken"), + worker_id: workerId, + scope: "client", + token: clientToken, + }, + { + id: createDenTypeId("workerToken"), + worker_id: workerId, + scope: "activity", + token: activityToken, + }, + ]) + + if (input.destination === "cloud") { + void continueCloudProvisioning({ + workerId, + name: input.name, + hostToken, + clientToken, + activityToken, + }) + } + + return c.json({ + worker: toWorkerResponse( + { + id: workerId, + org_id: orgId, + created_by_user_id: user.id, + name: input.name, + description: input.description ?? null, + destination: input.destination, + status: workerStatus, + image_version: input.imageVersion ?? null, + workspace_path: input.workspacePath ?? null, + sandbox_backend: input.sandboxBackend ?? null, + last_heartbeat_at: null, + last_active_at: null, + created_at: new Date(), + updated_at: new Date(), + }, + user.id, + ), + tokens: { + owner: hostToken, + host: hostToken, + client: clientToken, + }, + instance: null, + launch: input.destination === "cloud" ? { mode: "async", pollAfterMs: 5000 } : { mode: "instant", pollAfterMs: 0 }, + }, input.destination === "cloud" ? 
202 : 201) + }, + ) + + app.get( + "/v1/workers/:id", + describeRoute({ + tags: ["Workers"], + summary: "Get worker", + description: "Returns one worker from the active organization together with its latest provisioned instance details.", + responses: { + 200: jsonResponse("Worker returned successfully.", workerResponseSchema), + 400: jsonResponse("The worker path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to read worker details.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + async (c) => { + const user = c.get("user") + const orgId = c.get("activeOrganizationId") + const params = c.req.valid("param") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + const instance = await getLatestWorkerInstance(worker.id) + + return c.json({ + worker: toWorkerResponse(worker, user.id), + instance: toInstanceResponse(instance), + }) + }, + ) + + app.patch( + "/v1/workers/:id", + describeRoute({ + tags: ["Workers"], + summary: "Update worker", + description: "Renames a worker, but only when the caller is the user who originally created that worker.", + responses: { + 200: jsonResponse("Worker updated successfully.", z.object({ worker: workerSchema }).meta({ ref: "WorkerUpdateResponse" })), + 400: jsonResponse("The worker update request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to update workers.", unauthorizedSchema), + 403: jsonResponse("Only the worker owner can rename this worker.", forbiddenSchema), + 404: jsonResponse("The 
worker could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + jsonValidator(updateWorkerSchema), + async (c) => { + const user = c.get("user") + const orgId = c.get("activeOrganizationId") + const params = c.req.valid("param") + const input = c.req.valid("json") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + if (worker.created_by_user_id !== user.id) { + return c.json({ + error: "forbidden", + message: "Only the worker owner can rename this worker.", + }, 403) + } + + await db.update(WorkerTable).set({ name: input.name }).where(eq(WorkerTable.id, workerId)) + + return c.json({ + worker: toWorkerResponse( + { + ...worker, + name: input.name, + updated_at: new Date(), + }, + user.id, + ), + }) + }, + ) + + app.post( + "/v1/workers/:id/tokens", + describeRoute({ + tags: ["Workers"], + summary: "Get worker connection tokens", + description: "Returns connection tokens and the resolved OpenWork connect URL for an existing worker.", + responses: { + 200: jsonResponse("Worker connection tokens returned successfully.", workerTokensResponseSchema), + 400: jsonResponse("The worker token path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to request worker tokens.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + 409: jsonResponse("The worker is not ready to return connection tokens yet.", workerRuntimeUnavailableSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + async (c) => { + const orgId = 
c.get("activeOrganizationId") + const params = c.req.valid("param") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + const resolved = await getWorkerTokensAndConnect(worker) + if ("error" in resolved && resolved.error) { + return new Response(JSON.stringify(resolved.error.body), { + status: resolved.error.status, + headers: { + "Content-Type": "application/json", + }, + }) + } + + return c.json(resolved) + }, + ) + + app.delete( + "/v1/workers/:id", + describeRoute({ + tags: ["Workers"], + summary: "Delete worker", + description: "Deletes a worker and cascades cleanup for its tokens, runtime records, and provider-specific resources.", + responses: { + 204: emptyResponse("Worker deleted successfully."), + 400: jsonResponse("The worker deletion path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to delete workers.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + async (c) => { + const orgId = c.get("activeOrganizationId") + const params = c.req.valid("param") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + await deleteWorkerCascade(worker) + return c.body(null, 204) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/workers/index.ts 
b/ee/apps/den-api/src/routes/workers/index.ts new file mode 100644 index 0000000000..4c14419dfb --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/index.ts @@ -0,0 +1,13 @@ +import type { Hono } from "hono" +import type { WorkerRouteVariables } from "./shared.js" +import { registerWorkerActivityRoutes } from "./activity.js" +import { registerWorkerBillingRoutes } from "./billing.js" +import { registerWorkerCoreRoutes } from "./core.js" +import { registerWorkerRuntimeRoutes } from "./runtime.js" + +export function registerWorkerRoutes(app: Hono) { + registerWorkerActivityRoutes(app) + registerWorkerBillingRoutes(app) + registerWorkerCoreRoutes(app) + registerWorkerRuntimeRoutes(app) +} diff --git a/ee/apps/den-api/src/routes/workers/runtime.ts b/ee/apps/den-api/src/routes/workers/runtime.ts new file mode 100644 index 0000000000..244e574936 --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/runtime.ts @@ -0,0 +1,115 @@ +import type { Hono } from "hono" +import { describeRoute } from "hono-openapi" +import { z } from "zod" +import { jsonValidator, paramValidator, requireUserMiddleware, resolveUserOrganizationsMiddleware } from "../../middleware/index.js" +import { invalidRequestSchema, jsonResponse, notFoundSchema, unauthorizedSchema } from "../../openapi.js" +import type { WorkerRouteVariables } from "./shared.js" +import { fetchWorkerRuntimeJson, getWorkerByIdForOrg, parseWorkerIdParam, workerIdParamSchema } from "./shared.js" + +const workerRuntimeResponseSchema = z.object({}).passthrough().meta({ ref: "WorkerRuntimeResponse" }) + +export function registerWorkerRuntimeRoutes(app: Hono) { + app.get( + "/v1/workers/:id/runtime", + describeRoute({ + tags: ["Workers", "Worker Runtime"], + summary: "Get worker runtime status", + description: "Fetches runtime version and status information from a specific worker's runtime endpoint.", + responses: { + 200: jsonResponse("Worker runtime information returned successfully.", workerRuntimeResponseSchema), + 400: 
jsonResponse("The worker runtime path parameters were invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to read worker runtime information.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + async (c) => { + const orgId = c.get("activeOrganizationId") + const params = c.req.valid("param") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + const runtime = await fetchWorkerRuntimeJson({ + workerId: worker.id, + path: "/runtime/versions", + }) + + return new Response(JSON.stringify(runtime.payload), { + status: runtime.status, + headers: { + "Content-Type": "application/json", + }, + }) + }, + ) + + app.post( + "/v1/workers/:id/runtime/upgrade", + describeRoute({ + tags: ["Workers", "Worker Runtime"], + summary: "Upgrade worker runtime", + description: "Forwards a runtime upgrade request to a specific worker and returns the worker runtime's response.", + responses: { + 200: jsonResponse("Worker runtime upgrade request completed successfully.", workerRuntimeResponseSchema), + 400: jsonResponse("The runtime upgrade request was invalid.", invalidRequestSchema), + 401: jsonResponse("The caller must be signed in to upgrade a worker runtime.", unauthorizedSchema), + 404: jsonResponse("The worker could not be found.", notFoundSchema), + }, + }), + requireUserMiddleware, + resolveUserOrganizationsMiddleware, + paramValidator(workerIdParamSchema), + jsonValidator(z.object({}).passthrough()), + async (c) => { + const orgId = c.get("activeOrganizationId") + const params = c.req.valid("param") + 
const body = c.req.valid("json") + + if (!orgId) { + return c.json({ error: "worker_not_found" }, 404) + } + + let workerId + try { + workerId = parseWorkerIdParam(params.id) + } catch { + return c.json({ error: "worker_not_found" }, 404) + } + + const worker = await getWorkerByIdForOrg(workerId, orgId) + if (!worker) { + return c.json({ error: "worker_not_found" }, 404) + } + + const runtime = await fetchWorkerRuntimeJson({ + workerId: worker.id, + path: "/runtime/upgrade", + method: "POST", + body, + }) + + return new Response(JSON.stringify(runtime.payload), { + status: runtime.status, + headers: { + "Content-Type": "application/json", + }, + }) + }, + ) +} diff --git a/ee/apps/den-api/src/routes/workers/shared.ts b/ee/apps/den-api/src/routes/workers/shared.ts new file mode 100644 index 0000000000..9bd16c8c48 --- /dev/null +++ b/ee/apps/den-api/src/routes/workers/shared.ts @@ -0,0 +1,495 @@ +import { randomBytes } from "node:crypto" +import { and, asc, desc, eq, isNull } from "@openwork-ee/den-db/drizzle" +import { + AuditEventTable, + AuthUserTable, + DaytonaSandboxTable, + MemberTable, + WorkerBundleTable, + WorkerInstanceTable, + WorkerTable, + WorkerTokenTable, +} from "@openwork-ee/den-db/schema" +import { createDenTypeId, normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import { z } from "zod" +import { getCloudWorkerBillingStatus, requireCloudWorkerAccess, setCloudWorkerSubscriptionCancellation } from "../../billing/polar.js" +import { db } from "../../db.js" +import { env } from "../../env.js" +import type { UserOrganizationsContext } from "../../middleware/index.js" +import { denTypeIdSchema } from "../../openapi.js" +import type { AuthContextVariables } from "../../session.js" +import { deprovisionWorker, provisionWorker } from "../../workers/provisioner.js" +import { customDomainForWorker } from "../../workers/vanity-domain.js" + +export const createWorkerSchema = z.object({ + name: z.string().min(1), + description: z.string().optional(), + 
destination: z.enum(["local", "cloud"]), + workspacePath: z.string().optional(), + sandboxBackend: z.string().optional(), + imageVersion: z.string().optional(), +}) + +export const updateWorkerSchema = z.object({ + name: z.string().trim().min(1).max(255), +}) + +export const listWorkersQuerySchema = z.object({ + limit: z.coerce.number().int().min(1).max(50).default(20), +}) + +export const billingQuerySchema = z.object({ + includeCheckout: z.string().optional(), + excludePortal: z.string().optional(), + excludeInvoices: z.string().optional(), +}) + +export const billingSubscriptionSchema = z.object({ + cancelAtPeriodEnd: z.boolean().default(true), +}) + +export const activityHeartbeatSchema = z.object({ + sentAt: z.string().datetime().optional(), + isActiveRecently: z.boolean(), + lastActivityAt: z.string().datetime().optional().nullable(), + openSessionCount: z.number().int().min(0).optional(), +}) + +export const workerIdParamSchema = z.object({ + id: denTypeIdSchema("worker"), +}) + +export type WorkerRouteVariables = AuthContextVariables & Partial + +type WorkerRow = typeof WorkerTable.$inferSelect +type WorkerInstanceRow = typeof WorkerInstanceTable.$inferSelect +export type WorkerId = WorkerRow["id"] +type OrgId = typeof MemberTable.$inferSelect.organizationId +type UserId = typeof AuthUserTable.$inferSelect.id + +export const token = () => randomBytes(32).toString("hex") + +export function parseWorkerIdParam(value: string): WorkerId { + return normalizeDenTypeId("worker", value) +} + +export function parseUserId(value: string): UserId { + return normalizeDenTypeId("user", value) +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null +} + +function normalizeUrl(value: string): string { + return value.trim().replace(/\/+$/, "") +} + +function parseWorkspaceSelection(payload: unknown): { workspaceId: string; openworkUrl: string } | null { + if (!isRecord(payload) || !Array.isArray(payload.items)) { + 
return null + } + + const activeId = typeof payload.activeId === "string" ? payload.activeId : null + let workspaceId = activeId + + if (!workspaceId) { + for (const item of payload.items) { + if (isRecord(item) && typeof item.id === "string" && item.id.trim()) { + workspaceId = item.id + break + } + } + } + + const baseUrl = typeof payload.baseUrl === "string" ? normalizeUrl(payload.baseUrl) : "" + if (!workspaceId || !baseUrl) { + return null + } + + return { + workspaceId, + openworkUrl: `${baseUrl}/w/${encodeURIComponent(workspaceId)}`, + } +} + +async function resolveConnectUrlFromWorker(instanceUrl: string, clientToken: string) { + const baseUrl = normalizeUrl(instanceUrl) + if (!baseUrl || !clientToken.trim()) { + return null + } + + try { + const response = await fetch(`${baseUrl}/workspaces`, { + method: "GET", + headers: { + Accept: "application/json", + Authorization: `Bearer ${clientToken.trim()}`, + }, + }) + + if (!response.ok) { + return null + } + + const payload = (await response.json()) as unknown + const selected = parseWorkspaceSelection({ + ...(isRecord(payload) ? 
payload : {}), + baseUrl, + }) + return selected + } catch { + return null + } +} + +function getConnectUrlCandidates(workerId: WorkerId, instanceUrl: string | null) { + const candidates: string[] = [] + const vanityHostname = customDomainForWorker(workerId, env.render.workerPublicDomainSuffix) + if (vanityHostname) { + candidates.push(`https://${vanityHostname}`) + } + + if (instanceUrl) { + const normalized = normalizeUrl(instanceUrl) + if (normalized && !candidates.includes(normalized)) { + candidates.push(normalized) + } + } + + return candidates +} + +export function queryIncludesFlag(value: string | undefined): boolean { + if (typeof value !== "string") { + return false + } + + const normalized = value.trim().toLowerCase() + return normalized === "1" || normalized === "true" || normalized === "yes" +} + +export function readBearerToken(value: string | undefined) { + const trimmed = value?.trim() ?? "" + if (!trimmed.toLowerCase().startsWith("bearer ")) { + return null + } + const tokenValue = trimmed.slice(7).trim() + return tokenValue ? tokenValue : null +} + +export function parseHeartbeatTimestamp(value: string | null | undefined) { + if (!value) { + return null + } + const parsed = new Date(value) + if (Number.isNaN(parsed.getTime())) { + return null + } + return parsed +} + +export function newerDate(current: Date | null | undefined, candidate: Date | null | undefined) { + if (!candidate) { + return current ?? null + } + if (!current) { + return candidate + } + return candidate.getTime() > current.getTime() ? 
candidate : current +} + +async function resolveConnectUrlFromCandidates(workerId: WorkerId, instanceUrl: string | null, clientToken: string) { + const candidates = getConnectUrlCandidates(workerId, instanceUrl) + for (const candidate of candidates) { + const resolved = await resolveConnectUrlFromWorker(candidate, clientToken) + if (resolved) { + return resolved + } + } + return null +} + +async function getWorkerRuntimeAccess(workerId: WorkerId) { + const instance = await getLatestWorkerInstance(workerId) + const tokenRows = await db + .select() + .from(WorkerTokenTable) + .where(and(eq(WorkerTokenTable.worker_id, workerId), isNull(WorkerTokenTable.revoked_at))) + .orderBy(asc(WorkerTokenTable.created_at)) + + const hostToken = tokenRows.find((entry) => entry.scope === "host")?.token ?? null + if (!instance?.url || !hostToken) { + return null + } + + return { + instance, + hostToken, + candidates: getConnectUrlCandidates(workerId, instance.url), + } +} + +export async function fetchWorkerRuntimeJson(input: { + workerId: WorkerId + path: string + method?: "GET" | "POST" + body?: unknown +}) { + const access = await getWorkerRuntimeAccess(input.workerId) + if (!access) { + return { + ok: false as const, + status: 409, + payload: { + error: "worker_runtime_unavailable", + message: "Worker runtime access is not ready yet. Wait for provisioning to finish and try again.", + }, + } + } + + let lastPayload: unknown = null + let lastStatus = 502 + + for (const candidate of access.candidates) { + try { + const response = await fetch(`${normalizeUrl(candidate)}${input.path}`, { + method: input.method ?? "GET", + headers: { + Accept: "application/json", + "Content-Type": "application/json", + "X-OpenWork-Host-Token": access.hostToken, + }, + body: input.body === undefined ? undefined : JSON.stringify(input.body), + }) + + const text = await response.text() + lastStatus = response.status + try { + lastPayload = text ? JSON.parse(text) : null + } catch { + lastPayload = text ? 
{ message: text } : null + } + + if (response.ok) { + return { ok: true as const, status: response.status, payload: lastPayload } + } + } catch (error) { + lastPayload = { message: error instanceof Error ? error.message : "worker_request_failed" } + } + } + + return { ok: false as const, status: lastStatus, payload: lastPayload } +} + +export async function countUserCloudWorkers(userId: UserId) { + const rows = await db + .select({ id: WorkerTable.id }) + .from(WorkerTable) + .where(and(eq(WorkerTable.created_by_user_id, userId), eq(WorkerTable.destination, "cloud"))) + .limit(2) + + return rows.length +} + +export async function getLatestWorkerInstance(workerId: WorkerId) { + const rows = await db + .select() + .from(WorkerInstanceTable) + .where(eq(WorkerInstanceTable.worker_id, workerId)) + .orderBy(desc(WorkerInstanceTable.created_at)) + .limit(1) + + return rows[0] ?? null +} + +export function toInstanceResponse(instance: WorkerInstanceRow | null) { + if (!instance) { + return null + } + + return { + provider: instance.provider, + region: instance.region, + url: instance.url, + status: instance.status, + createdAt: instance.created_at, + updatedAt: instance.updated_at, + } +} + +export function toWorkerResponse(row: WorkerRow, userId: string) { + return { + id: row.id, + orgId: row.org_id, + createdByUserId: row.created_by_user_id, + isMine: row.created_by_user_id === userId, + name: row.name, + description: row.description, + destination: row.destination, + status: row.status, + imageVersion: row.image_version, + workspacePath: row.workspace_path, + sandboxBackend: row.sandbox_backend, + lastHeartbeatAt: row.last_heartbeat_at, + lastActiveAt: row.last_active_at, + createdAt: row.created_at, + updatedAt: row.updated_at, + } +} + +export async function continueCloudProvisioning(input: { + workerId: WorkerId + name: string + hostToken: string + clientToken: string + activityToken: string +}) { + try { + const provisioned = await provisionWorker({ + workerId: 
input.workerId, + name: input.name, + hostToken: input.hostToken, + clientToken: input.clientToken, + activityToken: input.activityToken, + }) + + await db + .update(WorkerTable) + .set({ status: provisioned.status }) + .where(eq(WorkerTable.id, input.workerId)) + + await db.insert(WorkerInstanceTable).values({ + id: createDenTypeId("workerInstance"), + worker_id: input.workerId, + provider: provisioned.provider, + region: provisioned.region, + url: provisioned.url, + status: provisioned.status, + }) + } catch (error) { + await db + .update(WorkerTable) + .set({ status: "failed" }) + .where(eq(WorkerTable.id, input.workerId)) + + const message = error instanceof Error ? error.message : "provisioning_failed" + console.error(`[workers] provisioning failed for ${input.workerId}: ${message}`) + } +} + +export async function requireCloudAccessOrPayment(input: { + userId: UserId + email: string + name: string +}) { + return requireCloudWorkerAccess(input) +} + +export async function getWorkerBilling(input: { + userId: UserId + email: string + name: string + includeCheckoutUrl: boolean + includePortalUrl: boolean + includeInvoices: boolean +}) { + return getCloudWorkerBillingStatus( + { + userId: input.userId, + email: input.email, + name: input.name, + }, + { + includeCheckoutUrl: input.includeCheckoutUrl, + includePortalUrl: input.includePortalUrl, + includeInvoices: input.includeInvoices, + }, + ) +} + +export async function setWorkerBillingSubscription(input: { + userId: UserId + email: string + name: string + cancelAtPeriodEnd: boolean +}) { + return setCloudWorkerSubscriptionCancellation( + { + userId: input.userId, + email: input.email, + name: input.name, + }, + input.cancelAtPeriodEnd, + ) +} + +export async function getWorkerTokensAndConnect(worker: WorkerRow) { + const tokenRows = await db + .select() + .from(WorkerTokenTable) + .where(and(eq(WorkerTokenTable.worker_id, worker.id), isNull(WorkerTokenTable.revoked_at))) + 
.orderBy(asc(WorkerTokenTable.created_at)) + + const hostToken = tokenRows.find((entry) => entry.scope === "host")?.token ?? null + const clientToken = tokenRows.find((entry) => entry.scope === "client")?.token ?? null + + if (!hostToken || !clientToken) { + return { + error: { + status: 409, + body: { + error: "worker_tokens_unavailable", + message: "Worker tokens are missing for this worker. Launch a new worker and try again.", + }, + }, + } + } + + const instance = await getLatestWorkerInstance(worker.id) + const connect = await resolveConnectUrlFromCandidates(worker.id, instance?.url ?? null, clientToken) + + return { + tokens: { + owner: hostToken, + host: hostToken, + client: clientToken, + }, + connect: connect ?? (instance?.url ? { openworkUrl: instance.url, workspaceId: null } : null), + } +} + +export async function deleteWorkerCascade(worker: WorkerRow) { + const instance = await getLatestWorkerInstance(worker.id) + + if (worker.destination === "cloud") { + try { + await deprovisionWorker({ + workerId: worker.id, + instanceUrl: instance?.url ?? null, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : "deprovision_failed" + console.warn(`[workers] deprovision warning for ${worker.id}: ${message}`) + } + } + + await db.transaction(async (tx) => { + await tx.delete(WorkerTokenTable).where(eq(WorkerTokenTable.worker_id, worker.id)) + await tx.delete(DaytonaSandboxTable).where(eq(DaytonaSandboxTable.worker_id, worker.id)) + await tx.delete(WorkerInstanceTable).where(eq(WorkerInstanceTable.worker_id, worker.id)) + await tx.delete(WorkerBundleTable).where(eq(WorkerBundleTable.worker_id, worker.id)) + await tx.delete(AuditEventTable).where(eq(AuditEventTable.worker_id, worker.id)) + await tx.delete(WorkerTable).where(eq(WorkerTable.id, worker.id)) + }) +} + +export async function getWorkerByIdForOrg(workerId: WorkerId, orgId: OrgId) { + const rows = await db + .select() + .from(WorkerTable) + .where(and(eq(WorkerTable.id, workerId), eq(WorkerTable.org_id, orgId))) + .limit(1) + + return rows[0] ?? null +} diff --git a/ee/apps/den-api/src/server.ts b/ee/apps/den-api/src/server.ts new file mode 100644 index 0000000000..a60f9ac887 --- /dev/null +++ b/ee/apps/den-api/src/server.ts @@ -0,0 +1,7 @@ +import { serve } from "@hono/node-server" +import app from "./app.js" +import { env } from "./env.js" + +serve({ fetch: app.fetch, port: env.port }, (info) => { + console.log(`den-api listening on ${info.port}`) +}) diff --git a/ee/apps/den-api/src/session.ts b/ee/apps/den-api/src/session.ts new file mode 100644 index 0000000000..c455b0e00b --- /dev/null +++ b/ee/apps/den-api/src/session.ts @@ -0,0 +1,220 @@ +import { and, eq, gt } from "@openwork-ee/den-db/drizzle" +import { AuthSessionTable, AuthUserTable } from "@openwork-ee/den-db/schema" +import { normalizeDenTypeId } from "@openwork-ee/utils/typeid" +import { createHmac, timingSafeEqual } from "node:crypto" +import type { MiddlewareHandler } from "hono" +import { DEN_API_KEY_HEADER, getApiKeySessionById, type DenApiKeySession } from "./api-keys.js" +import { auth } from "./auth.js" +import { db } from 
"./db.js" +import { env } from "./env.js" + +type AuthSessionLike = Awaited> +type AuthSessionValue = NonNullable + +export type AuthContextVariables = { + user: AuthSessionValue["user"] | null + session: AuthSessionValue["session"] | null + apiKey: DenApiKeySession | null +} + +const INTERNAL_MCP_PRINCIPAL_HEADER = "x-den-internal-mcp-principal" +const INTERNAL_MCP_PRINCIPAL_TTL_MS = 60_000 + +type InternalMcpPrincipal = { + userId: string + organizationId: string + expiresAt: number +} + +function signPrincipalPayload(payload: string) { + return createHmac("sha256", env.betterAuthSecret).update(payload).digest("base64url") +} + +function verifySignature(payload: string, signature: string) { + const expected = signPrincipalPayload(payload) + const expectedBuffer = new Uint8Array(Buffer.from(expected)) + const receivedBuffer = new Uint8Array(Buffer.from(signature)) + return expectedBuffer.length === receivedBuffer.length && timingSafeEqual(expectedBuffer, receivedBuffer) +} + +export function createInternalMcpPrincipalHeader(input: { userId: string; organizationId: string }) { + const principal: InternalMcpPrincipal = { + userId: normalizeDenTypeId("user", input.userId), + organizationId: normalizeDenTypeId("organization", input.organizationId), + expiresAt: Date.now() + INTERNAL_MCP_PRINCIPAL_TTL_MS, + } + const payload = Buffer.from(JSON.stringify(principal), "utf8").toString("base64url") + return `${payload}.${signPrincipalPayload(payload)}` +} + +async function getSessionFromInternalMcpPrincipal(headers: Headers): Promise<(AuthSessionValue & { activeOrganizationId: string }) | null> { + const header = headers.get(INTERNAL_MCP_PRINCIPAL_HEADER) + if (!header) { + return null + } + + const [payload, signature] = header.split(".") + if (!payload || !signature || !verifySignature(payload, signature)) { + return null + } + + let parsed: InternalMcpPrincipal + try { + parsed = JSON.parse(Buffer.from(payload, "base64url").toString("utf8")) as InternalMcpPrincipal + } 
catch { + return null + } + + if (typeof parsed.userId !== "string" || typeof parsed.organizationId !== "string" || typeof parsed.expiresAt !== "number" || parsed.expiresAt < Date.now()) { + return null + } + + const rows = await db + .select({ + id: AuthUserTable.id, + name: AuthUserTable.name, + email: AuthUserTable.email, + emailVerified: AuthUserTable.emailVerified, + image: AuthUserTable.image, + createdAt: AuthUserTable.createdAt, + updatedAt: AuthUserTable.updatedAt, + }) + .from(AuthUserTable) + .where(eq(AuthUserTable.id, normalizeDenTypeId("user", parsed.userId))) + .limit(1) + + const user = rows[0] + if (!user) { + return null + } + + return { + user: { + ...user, + id: normalizeDenTypeId("user", user.id), + }, + session: { + id: "mcp_internal", + token: "mcp_internal", + userId: user.id, + activeOrganizationId: normalizeDenTypeId("organization", parsed.organizationId), + activeTeamId: null, + expiresAt: new Date(parsed.expiresAt), + createdAt: new Date(), + updatedAt: new Date(), + ipAddress: null, + userAgent: null, + }, + activeOrganizationId: normalizeDenTypeId("organization", parsed.organizationId), + } +} + +function readBearerToken(headers: Headers): string | null { + const header = headers.get("authorization")?.trim() ?? "" + if (!header) { + return null + } + + const match = header.match(/^Bearer\s+(.+)$/i) + if (!match) { + return null + } + + const token = match[1]?.trim() ?? 
"" + return token || null +} + +async function getSessionFromBearerToken(token: string): Promise { + const rows = await db + .select({ + session: { + id: AuthSessionTable.id, + token: AuthSessionTable.token, + userId: AuthSessionTable.userId, + activeOrganizationId: AuthSessionTable.activeOrganizationId, + activeTeamId: AuthSessionTable.activeTeamId, + expiresAt: AuthSessionTable.expiresAt, + createdAt: AuthSessionTable.createdAt, + updatedAt: AuthSessionTable.updatedAt, + ipAddress: AuthSessionTable.ipAddress, + userAgent: AuthSessionTable.userAgent, + }, + user: { + id: AuthUserTable.id, + name: AuthUserTable.name, + email: AuthUserTable.email, + emailVerified: AuthUserTable.emailVerified, + image: AuthUserTable.image, + createdAt: AuthUserTable.createdAt, + updatedAt: AuthUserTable.updatedAt, + }, + }) + .from(AuthSessionTable) + .innerJoin(AuthUserTable, eq(AuthSessionTable.userId, AuthUserTable.id)) + .where(and(eq(AuthSessionTable.token, token), gt(AuthSessionTable.expiresAt, new Date()))) + .limit(1) + + const row = rows[0] + if (!row) { + return null + } + + return { + session: row.session, + user: { + ...row.user, + id: normalizeDenTypeId("user", row.user.id), + }, + } +} + +export async function getRequestSession(headers: Headers): Promise { + const internalMcpSession = await getSessionFromInternalMcpPrincipal(headers) + if (internalMcpSession) { + return internalMcpSession + } + + let cookieSession: AuthSessionLike + try { + cookieSession = await auth.api.getSession({ headers }) + } catch { + return null + } + + if (cookieSession?.user?.id) { + return { + ...cookieSession, + user: { + ...cookieSession.user, + id: normalizeDenTypeId("user", cookieSession.user.id), + }, + } + } + + const bearerToken = readBearerToken(headers) + if (!bearerToken) { + return null + } + + return getSessionFromBearerToken(bearerToken) +} + +async function getRequestApiKeySession(headers: Headers, session: AuthSessionLike): Promise { + if (!headers.has(DEN_API_KEY_HEADER) || 
!session?.session?.id) { + return null + } + + return getApiKeySessionById(session.session.id) +} + +export const sessionMiddleware: MiddlewareHandler<{ Variables: AuthContextVariables }> = async (c, next) => { + const resolved = await getRequestSession(c.req.raw.headers) + const apiKey = await getRequestApiKeySession(c.req.raw.headers, resolved) + c.set("user", resolved?.user ?? null) + c.set("session", resolved?.session ?? null) + if (resolved?.session?.activeOrganizationId) { + ;(c as unknown as { set: (key: string, value: unknown) => void }).set("activeOrganizationId", resolved.session.activeOrganizationId) + } + c.set("apiKey", apiKey) + await next() +} diff --git a/ee/apps/den-api/src/user.ts b/ee/apps/den-api/src/user.ts new file mode 100644 index 0000000000..542622cf2e --- /dev/null +++ b/ee/apps/den-api/src/user.ts @@ -0,0 +1,8 @@ +export function getRequiredUserEmail(user: { id: string; email?: string | null }) { + const email = user.email?.trim() + if (!email) { + return null + } + + return email +} diff --git a/ee/apps/den-api/src/version.ts b/ee/apps/den-api/src/version.ts new file mode 100644 index 0000000000..f19c2386d9 --- /dev/null +++ b/ee/apps/den-api/src/version.ts @@ -0,0 +1,13 @@ +import { BUILD_LATEST_APP_VERSION } from "./generated/app-version.js"; + +const MIN_APP_VERSION = "0.11.207"; + +function normalizeVersion(value: string | undefined | null) { + const trimmed = value?.trim() ?? ""; + return trimmed || null; +} + +export const denApiAppVersion = { + minAppVersion: MIN_APP_VERSION, + latestAppVersion: normalizeVersion(BUILD_LATEST_APP_VERSION) ?? 
"0.0.0", +} as const; diff --git a/ee/apps/den-api/src/workers/daytona.ts b/ee/apps/den-api/src/workers/daytona.ts new file mode 100644 index 0000000000..0ec0719057 --- /dev/null +++ b/ee/apps/den-api/src/workers/daytona.ts @@ -0,0 +1,559 @@ +import { Daytona, type Sandbox } from "@daytonaio/sdk" +import { eq } from "@openwork-ee/den-db/drizzle" +import { DaytonaSandboxTable } from "@openwork-ee/den-db/schema" +import { createDenTypeId } from "@openwork-ee/utils/typeid" +import { db } from "../db.js" +import { env } from "../env.js" + +type WorkerId = typeof DaytonaSandboxTable.$inferSelect.worker_id + +type ProvisionInput = { + workerId: WorkerId + name: string + hostToken: string + clientToken: string + activityToken: string +} + +type ProvisionedInstance = { + provider: string + url: string + status: "provisioning" | "healthy" + region?: string +} + +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) +const maxSignedPreviewExpirySeconds = 60 * 60 * 24 +const signedPreviewRefreshLeadMs = 5 * 60 * 1000 + +const slug = (value: string) => + value + .toLowerCase() + .replace(/[^a-z0-9-]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, "") + +function shellQuote(value: string) { + return `'${value.replace(/'/g, `'"'"'`)}'` +} + +function createDaytonaClient() { + return new Daytona({ + apiKey: env.daytona.apiKey, + apiUrl: env.daytona.apiUrl, + ...(env.daytona.target ? 
{ target: env.daytona.target } : {}), + }) +} + +function normalizedSignedPreviewExpirySeconds() { + return Math.max( + 1, + Math.min(env.daytona.signedPreviewExpiresSeconds, maxSignedPreviewExpirySeconds), + ) +} + +function signedPreviewRefreshAt(expiresInSeconds: number) { + return new Date( + Date.now() + Math.max(0, expiresInSeconds * 1000 - signedPreviewRefreshLeadMs), + ) +} + +function workerProxyUrl(workerId: WorkerId) { + return `${env.daytona.workerProxyBaseUrl.replace(/\/+$/, "")}/${encodeURIComponent(workerId)}` +} + +function workerActivityHeartbeatUrl(workerId: WorkerId) { + const base = env.workerActivityBaseUrl.replace(/\/+$/, "") + return `${base}/v1/workers/${encodeURIComponent(workerId)}/activity-heartbeat` +} + +function assertDaytonaConfig() { + if (!env.daytona.apiKey) { + throw new Error("DAYTONA_API_KEY is required for daytona provisioner") + } +} + +function workerHint(workerId: WorkerId) { + return workerId.replace(/-/g, "").slice(0, 12) +} + +function sandboxLabels(workerId: WorkerId) { + return { + "openwork.den.provider": "daytona", + "openwork.den.worker-id": workerId, + } +} + +function sandboxName(input: ProvisionInput) { + return slug( + `${env.daytona.sandboxNamePrefix}-${input.name}-${workerHint(input.workerId)}`, + ).slice(0, 63) +} + +function sharedVolumeName() { + return slug(env.daytona.sharedVolumeName).slice(0, 63) +} + +function workerVolumeRootSubpath(workerId: WorkerId) { + return `workers/${workerId}` +} + +function workspaceVolumeSubpath(workerId: WorkerId) { + return `${workerVolumeRootSubpath(workerId)}/workspace` +} + +function dataVolumeSubpath(workerId: WorkerId) { + return `${workerVolumeRootSubpath(workerId)}/data` +} + +function sharedVolumeMounts(workerId: WorkerId, volumeId: string) { + return [ + { + volumeId, + mountPath: env.daytona.workspaceMountPath, + subpath: workspaceVolumeSubpath(workerId), + }, + { + volumeId, + mountPath: env.daytona.dataMountPath, + subpath: dataVolumeSubpath(workerId), + }, + ] 
+} + +function buildOpenWorkStartCommand(input: ProvisionInput) { + const verifyRuntimeStep = [ + "if ! command -v openwork >/dev/null 2>&1; then echo 'openwork binary missing from Daytona runtime image; rebuild and republish the Daytona snapshot' >&2; exit 1; fi", + "if ! command -v opencode >/dev/null 2>&1; then echo 'opencode binary missing from Daytona runtime image; rebuild and republish the Daytona snapshot' >&2; exit 1; fi", + ].join("; ") + const openworkServe = [ + "OPENWORK_DATA_DIR=", + shellQuote(env.daytona.runtimeDataPath), + " OPENWORK_SIDECAR_DIR=", + shellQuote(env.daytona.sidecarDir), + " OPENWORK_TOKEN=", + shellQuote(input.clientToken), + " OPENWORK_HOST_TOKEN=", + shellQuote(input.hostToken), + " DEN_RUNTIME_PROVIDER=", + shellQuote("daytona"), + " DEN_WORKER_ID=", + shellQuote(input.workerId), + " DEN_ACTIVITY_HEARTBEAT_ENABLED=", + shellQuote("1"), + " DEN_ACTIVITY_HEARTBEAT_URL=", + shellQuote(workerActivityHeartbeatUrl(input.workerId)), + " DEN_ACTIVITY_HEARTBEAT_TOKEN=", + shellQuote(input.activityToken), + " openwork serve", + ` --workspace ${shellQuote(env.daytona.runtimeWorkspacePath)}`, + ` --remote-access`, + ` --openwork-port ${env.daytona.openworkPort}`, + ` --opencode-host 127.0.0.1`, + ` --opencode-port ${env.daytona.opencodePort}`, + ` --connect-host 127.0.0.1`, + ` --cors '*'`, + ` --approval manual`, + ` --allow-external`, + ` --opencode-source external`, + ` --opencode-bin $(command -v opencode)`, + ` --no-opencode-router`, + ` --verbose`, + ].join("") + + const script = ` +set -u +mkdir -p ${shellQuote(env.daytona.workspaceMountPath)} ${shellQuote(env.daytona.dataMountPath)} ${shellQuote(env.daytona.runtimeWorkspacePath)} ${shellQuote(env.daytona.runtimeDataPath)} ${shellQuote(env.daytona.sidecarDir)} ${shellQuote(`${env.daytona.runtimeWorkspacePath}/volumes`)} +ln -sfn ${shellQuote(env.daytona.workspaceMountPath)} ${shellQuote(`${env.daytona.runtimeWorkspacePath}/volumes/workspace`) } +ln -sfn 
${shellQuote(env.daytona.dataMountPath)} ${shellQuote(`${env.daytona.runtimeWorkspacePath}/volumes/data`) } +${verifyRuntimeStep} +attempt=0 +while [ "$attempt" -lt 3 ]; do + attempt=$((attempt + 1)) + if ${openworkServe}; then + exit 0 + fi + status=$? + echo "openwork serve failed (attempt $attempt, exit $status); retrying in 3s" + sleep 3 +done +exit 1 +`.trim() + + return `sh -lc ${shellQuote(script)}` +} + +async function waitForVolumeReady(daytona: Daytona, name: string, timeoutMs: number) { + const startedAt = Date.now() + + while (Date.now() - startedAt < timeoutMs) { + const volume = await daytona.volume.get(name) + if (volume.state === "ready") { + return volume + } + await sleep(env.daytona.pollIntervalMs) + } + + throw new Error(`Timed out waiting for Daytona volume ${name} to become ready`) +} + +function buildVolumeCleanupCommand(workerId: WorkerId) { + return [ + "node -e", + shellQuote( + [ + 'const fs = require("node:fs")', + 'const path = require("node:path")', + 'for (const dir of process.argv.slice(1)) {', + ' fs.mkdirSync(dir, { recursive: true })', + ' for (const entry of fs.readdirSync(dir)) {', + ' fs.rmSync(path.join(dir, entry), { recursive: true, force: true })', + ' }', + '}', + ].join("; "), + ), + shellQuote(env.daytona.workspaceMountPath), + shellQuote(env.daytona.dataMountPath), + ].join(" ") +} + +async function cleanupWorkerDataOnDaytona(daytona: Daytona, workerId: WorkerId) { + let sharedVolume + + try { + sharedVolume = await waitForVolumeReady( + daytona, + sharedVolumeName(), + env.daytona.createTimeoutSeconds * 1000, + ) + } catch (error) { + const message = error instanceof Error ? 
error.message : "unknown_error" + console.warn(`[provisioner] failed to resolve shared Daytona volume for ${workerId}: ${message}`) + return + } + + let cleanupSandbox: Awaited> | null = null + + try { + cleanupSandbox = await daytona.create( + { + name: slug(`den-daytona-cleanup-${workerHint(workerId)}`).slice(0, 63), + image: env.daytona.image, + public: false, + autoStopInterval: 0, + autoArchiveInterval: 0, + autoDeleteInterval: 0, + ephemeral: true, + envVars: { + DEN_RUNTIME_PROVIDER: "daytona-cleanup", + DEN_WORKER_ID: workerId, + }, + resources: { + cpu: 1, + memory: 1, + disk: 4, + }, + volumes: sharedVolumeMounts(workerId, sharedVolume.id), + }, + { timeout: env.daytona.createTimeoutSeconds }, + ) + + const result = await cleanupSandbox.process.executeCommand( + buildVolumeCleanupCommand(workerId), + undefined, + undefined, + env.daytona.deleteTimeoutSeconds, + ) + + if (result.exitCode !== 0) { + throw new Error(result.result?.trim() || `cleanup command exited with ${result.exitCode}`) + } + } catch (error) { + const message = error instanceof Error ? error.message : "unknown_error" + console.warn(`[provisioner] failed to cleanup Daytona worker data for ${workerId}: ${message}`) + } finally { + if (cleanupSandbox) { + await cleanupSandbox.delete(env.daytona.deleteTimeoutSeconds).catch((error) => { + const message = error instanceof Error ? 
error.message : "unknown_error" + console.warn(`[provisioner] failed to delete Daytona cleanup sandbox for ${workerId}: ${message}`) + }) + } + } +} + +async function waitForHealth(url: string, timeoutMs: number, sandbox: Sandbox, sessionId: string, commandId: string) { + const startedAt = Date.now() + + while (Date.now() - startedAt < timeoutMs) { + try { + const response = await fetch(`${url.replace(/\/$/, "")}/health`, { method: "GET" }) + if (response.ok) { + return + } + } catch { + // ignore transient startup failures + } + + try { + const command = await sandbox.process.getSessionCommand(sessionId, commandId) + if (typeof command.exitCode === "number" && command.exitCode !== 0) { + const logs = await sandbox.process.getSessionCommandLogs(sessionId, commandId) + throw new Error( + [ + `openwork session exited with ${command.exitCode}`, + logs.stdout?.trim() ? `stdout:\n${logs.stdout.trim().slice(-4000)}` : "", + logs.stderr?.trim() ? `stderr:\n${logs.stderr.trim().slice(-4000)}` : "", + ] + .filter(Boolean) + .join("\n\n"), + ) + } + } catch (error) { + if (error instanceof Error && error.message.startsWith("openwork session exited")) { + throw error + } + } + + await sleep(env.daytona.pollIntervalMs) + } + + const logs = await sandbox.process.getSessionCommandLogs(sessionId, commandId).catch( + () => null, + ) + throw new Error( + [ + `Timed out waiting for Daytona worker health at ${url.replace(/\/$/, "")}/health`, + logs?.stdout?.trim() ? `stdout:\n${logs.stdout.trim().slice(-4000)}` : "", + logs?.stderr?.trim() ? 
`stderr:\n${logs.stderr.trim().slice(-4000)}` : "", + ] + .filter(Boolean) + .join("\n\n"), + ) +} + +async function upsertDaytonaSandbox(input: { + workerId: WorkerId + sandboxId: string + workspaceVolumeId: string + dataVolumeId: string + signedPreviewUrl: string + signedPreviewUrlExpiresAt: Date + region: string | null +}) { + const existing = await db + .select({ id: DaytonaSandboxTable.id }) + .from(DaytonaSandboxTable) + .where(eq(DaytonaSandboxTable.worker_id, input.workerId)) + .limit(1) + + if (existing.length > 0) { + await db + .update(DaytonaSandboxTable) + .set({ + sandbox_id: input.sandboxId, + workspace_volume_id: input.workspaceVolumeId, + data_volume_id: input.dataVolumeId, + signed_preview_url: input.signedPreviewUrl, + signed_preview_url_expires_at: input.signedPreviewUrlExpiresAt, + region: input.region, + }) + .where(eq(DaytonaSandboxTable.worker_id, input.workerId)) + return + } + + await db.insert(DaytonaSandboxTable).values({ + id: createDenTypeId("daytonaSandbox"), + worker_id: input.workerId, + sandbox_id: input.sandboxId, + workspace_volume_id: input.workspaceVolumeId, + data_volume_id: input.dataVolumeId, + signed_preview_url: input.signedPreviewUrl, + signed_preview_url_expires_at: input.signedPreviewUrlExpiresAt, + region: input.region, + }) +} + +export async function getDaytonaSandboxRecord(workerId: WorkerId) { + const rows = await db + .select() + .from(DaytonaSandboxTable) + .where(eq(DaytonaSandboxTable.worker_id, workerId)) + .limit(1) + + return rows[0] ?? 
null +} + +export async function refreshDaytonaSignedPreview(workerId: WorkerId) { + assertDaytonaConfig() + + const record = await getDaytonaSandboxRecord(workerId) + if (!record) { + return null + } + + const daytona = createDaytonaClient() + const sandbox = await daytona.get(record.sandbox_id) + await sandbox.refreshData() + + const expiresInSeconds = normalizedSignedPreviewExpirySeconds() + const preview = await sandbox.getSignedPreviewUrl(env.daytona.openworkPort, expiresInSeconds) + const expiresAt = signedPreviewRefreshAt(expiresInSeconds) + + await db + .update(DaytonaSandboxTable) + .set({ + signed_preview_url: preview.url, + signed_preview_url_expires_at: expiresAt, + region: sandbox.target, + }) + .where(eq(DaytonaSandboxTable.worker_id, workerId)) + + return { + ...record, + signed_preview_url: preview.url, + signed_preview_url_expires_at: expiresAt, + region: sandbox.target, + } +} + +export async function getDaytonaSignedPreviewForProxy(workerId: WorkerId) { + const record = await getDaytonaSandboxRecord(workerId) + if (!record) { + return null + } + + if (record.signed_preview_url_expires_at.getTime() > Date.now()) { + return record.signed_preview_url + } + + const refreshed = await refreshDaytonaSignedPreview(workerId) + return refreshed?.signed_preview_url ?? null +} + +export async function provisionWorkerOnDaytona( + input: ProvisionInput, +): Promise { + assertDaytonaConfig() + + const daytona = createDaytonaClient() + const labels = sandboxLabels(input.workerId) + const sharedVolumeNameValue = sharedVolumeName() + await daytona.volume.get(sharedVolumeNameValue, true) + const sharedVolume = await waitForVolumeReady( + daytona, + sharedVolumeNameValue, + env.daytona.createTimeoutSeconds * 1000, + ) + let sandbox: Awaited> | null = null + + try { + sandbox = env.daytona.snapshot + ? 
await daytona.create( + { + name: sandboxName(input), + snapshot: env.daytona.snapshot, + autoStopInterval: env.daytona.autoStopInterval, + autoArchiveInterval: env.daytona.autoArchiveInterval, + autoDeleteInterval: env.daytona.autoDeleteInterval, + public: env.daytona.public, + labels, + envVars: { + DEN_WORKER_ID: input.workerId, + DEN_RUNTIME_PROVIDER: "daytona", + }, + volumes: sharedVolumeMounts(input.workerId, sharedVolume.id), + }, + { timeout: env.daytona.createTimeoutSeconds }, + ) + : await daytona.create( + { + name: sandboxName(input), + image: env.daytona.image, + autoStopInterval: env.daytona.autoStopInterval, + autoArchiveInterval: env.daytona.autoArchiveInterval, + autoDeleteInterval: env.daytona.autoDeleteInterval, + public: env.daytona.public, + labels, + envVars: { + DEN_WORKER_ID: input.workerId, + DEN_RUNTIME_PROVIDER: "daytona", + }, + resources: { + cpu: env.daytona.resources.cpu, + memory: env.daytona.resources.memory, + disk: env.daytona.resources.disk, + }, + volumes: sharedVolumeMounts(input.workerId, sharedVolume.id), + }, + { timeout: env.daytona.createTimeoutSeconds }, + ) + + const sessionId = `openwork-${workerHint(input.workerId)}` + await sandbox.process.createSession(sessionId) + const command = await sandbox.process.executeSessionCommand( + sessionId, + { + command: buildOpenWorkStartCommand(input), + runAsync: true, + }, + 0, + ) + + const expiresInSeconds = normalizedSignedPreviewExpirySeconds() + const preview = await sandbox.getSignedPreviewUrl(env.daytona.openworkPort, expiresInSeconds) + await waitForHealth(preview.url, env.daytona.healthcheckTimeoutMs, sandbox, sessionId, command.cmdId) + await upsertDaytonaSandbox({ + workerId: input.workerId, + sandboxId: sandbox.id, + workspaceVolumeId: sharedVolume.id, + dataVolumeId: sharedVolume.id, + signedPreviewUrl: preview.url, + signedPreviewUrlExpiresAt: signedPreviewRefreshAt(expiresInSeconds), + region: sandbox.target ?? 
null, + }) + + return { + provider: "daytona", + url: workerProxyUrl(input.workerId), + status: "healthy", + region: sandbox.target, + } + } catch (error) { + if (sandbox) { + await sandbox.delete(env.daytona.deleteTimeoutSeconds).catch(() => {}) + } + throw error + } +} + +export async function deprovisionWorkerOnDaytona(workerId: WorkerId) { + assertDaytonaConfig() + + const daytona = createDaytonaClient() + const record = await getDaytonaSandboxRecord(workerId) + + if (record) { + try { + const sandbox = await daytona.get(record.sandbox_id) + await sandbox.delete(env.daytona.deleteTimeoutSeconds) + } catch (error) { + const message = error instanceof Error ? error.message : "unknown_error" + console.warn(`[provisioner] failed to delete Daytona sandbox ${record.sandbox_id}: ${message}`) + } + + await cleanupWorkerDataOnDaytona(daytona, workerId) + + return + } + + const sandboxes = await daytona.list(sandboxLabels(workerId), 1, 20) + + for (const sandbox of sandboxes.items) { + await sandbox.delete(env.daytona.deleteTimeoutSeconds).catch((error) => { + const message = error instanceof Error ? 
error.message : "unknown_error" + console.warn(`[provisioner] failed to delete Daytona sandbox ${sandbox.id}: ${message}`) + }) + } + + await cleanupWorkerDataOnDaytona(daytona, workerId) +} diff --git a/ee/apps/den-api/src/workers/provisioner.ts b/ee/apps/den-api/src/workers/provisioner.ts new file mode 100644 index 0000000000..dcf952a34e --- /dev/null +++ b/ee/apps/den-api/src/workers/provisioner.ts @@ -0,0 +1,406 @@ +import { WorkerTable } from "@openwork-ee/den-db/schema" +import { env } from "../env.js" +import { + deprovisionWorkerOnDaytona, + provisionWorkerOnDaytona, +} from "./daytona.js" +import { + customDomainForWorker, + ensureVercelDnsRecord, +} from "./vanity-domain.js" + +type WorkerId = typeof WorkerTable.$inferSelect.id + +export type ProvisionInput = { + workerId: WorkerId + name: string + hostToken: string + clientToken: string + activityToken: string +} + +export type ProvisionedInstance = { + provider: string + url: string + status: "provisioning" | "healthy" + region?: string +} + +type RenderService = { + id: string + name?: string + slug?: string + serviceDetails?: { + url?: string + region?: string + } +} + +type RenderServiceListRow = { + cursor?: string + service?: RenderService +} + +type RenderDeploy = { + id: string + status: string +} + +const terminalDeployStates = new Set([ + "live", + "update_failed", + "build_failed", + "canceled", +]) + +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) + +const slug = (value: string) => + value + .toLowerCase() + .replace(/[^a-z0-9-]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, "") + +const hostFromUrl = (value: string | null | undefined) => { + if (!value) { + return "" + } + + try { + return new URL(value).host.toLowerCase() + } catch { + return "" + } +} + +async function renderRequest( + path: string, + init: RequestInit = {}, +): Promise { + const headers = new Headers(init.headers) + headers.set("Authorization", `Bearer ${env.render.apiKey}`) + 
headers.set("Accept", "application/json") + + if (init.body && !headers.has("Content-Type")) { + headers.set("Content-Type", "application/json") + } + + const response = await fetch(`${env.render.apiBase}${path}`, { + ...init, + headers, + }) + const text = await response.text() + + if (!response.ok) { + throw new Error( + `Render API ${path} failed (${response.status}): ${text.slice(0, 400)}`, + ) + } + + if (!text) { + return null as T + } + + return JSON.parse(text) as T +} + +async function waitForDeployLive(serviceId: string) { + const startedAt = Date.now() + let latest: RenderDeploy | null = null + + while (Date.now() - startedAt < env.render.provisionTimeoutMs) { + const rows = await renderRequest>( + `/services/${serviceId}/deploys?limit=1`, + ) + latest = rows[0]?.deploy ?? null + + if (latest && terminalDeployStates.has(latest.status)) { + if (latest.status !== "live") { + throw new Error( + `Render deploy ${latest.id} ended with ${latest.status}`, + ) + } + return latest + } + + await sleep(env.render.pollIntervalMs) + } + + throw new Error( + `Timed out waiting for Render deploy for service ${serviceId}`, + ) +} + +async function waitForHealth( + url: string, + timeoutMs = env.render.healthcheckTimeoutMs, +) { + const healthUrl = `${url.replace(/\/$/, "")}/health` + const startedAt = Date.now() + + while (Date.now() - startedAt < timeoutMs) { + try { + const response = await fetch(healthUrl, { method: "GET" }) + if (response.ok) { + return + } + } catch { + // ignore transient network failures while the instance boots + } + await sleep(env.render.pollIntervalMs) + } + + throw new Error(`Timed out waiting for worker health endpoint ${healthUrl}`) +} + +async function listRenderServices(limit = 200) { + const rows: RenderService[] = [] + let cursor: string | undefined + + while (rows.length < limit) { + const query = new URLSearchParams({ limit: "100" }) + if (cursor) { + query.set("cursor", cursor) + } + + const page = await renderRequest( + 
`/services?${query.toString()}`, + ) + if (page.length === 0) { + break + } + + rows.push( + ...page + .map((entry) => entry.service) + .filter((entry): entry is RenderService => Boolean(entry?.id)), + ) + + const nextCursor = page[page.length - 1]?.cursor + if (!nextCursor || nextCursor === cursor) { + break + } + + cursor = nextCursor + } + + return rows.slice(0, limit) +} + +async function attachRenderCustomDomain( + serviceId: string, + workerId: string, + renderUrl: string, +) { + const hostname = customDomainForWorker( + workerId, + env.render.workerPublicDomainSuffix, + ) + if (!hostname) { + return null + } + + try { + await renderRequest(`/services/${serviceId}/custom-domains`, { + method: "POST", + body: JSON.stringify({ + name: hostname, + }), + }) + + const dnsReady = await ensureVercelDnsRecord({ + hostname, + targetUrl: renderUrl, + domain: env.vercel.dnsDomain ?? env.render.workerPublicDomainSuffix, + apiBase: env.vercel.apiBase, + token: env.vercel.token, + teamId: env.vercel.teamId, + teamSlug: env.vercel.teamSlug, + }) + + if (!dnsReady) { + console.warn( + `[provisioner] vanity dns upsert skipped or failed for ${hostname}; using Render URL fallback`, + ) + return null + } + + return `https://${hostname}` + } catch (error) { + const message = error instanceof Error ? 
error.message : "unknown_error" + console.warn( + `[provisioner] custom domain attach failed for ${serviceId}: ${message}`, + ) + return null + } +} + +function assertRenderConfig() { + if (!env.render.apiKey) { + throw new Error("RENDER_API_KEY is required for render provisioner") + } + if (!env.render.ownerId) { + throw new Error("RENDER_OWNER_ID is required for render provisioner") + } +} + +async function provisionWorkerOnRender( + input: ProvisionInput, +): Promise { + assertRenderConfig() + + const serviceName = slug( + `${env.render.workerNamePrefix}-${input.name}-${input.workerId.slice(0, 8)}`, + ).slice(0, 62) + const orchestratorPackage = env.render.workerOpenworkVersion?.trim() + ? `openwork-orchestrator@${env.render.workerOpenworkVersion.trim()}` + : "openwork-orchestrator" + const buildCommand = [ + `npm install -g ${orchestratorPackage}`, + "node ./scripts/install-opencode.mjs", + ].join(" && ") + const startCommand = [ + "mkdir -p /tmp/workspace", + "attempt=0; while [ $attempt -lt 3 ]; do attempt=$((attempt + 1)); openwork serve --workspace /tmp/workspace --remote-access --openwork-port ${PORT:-10000} --opencode-host 127.0.0.1 --opencode-port 4096 --connect-host 127.0.0.1 --cors '*' --approval manual --allow-external --opencode-source external --opencode-bin ./bin/opencode --no-opencode-router --verbose && exit 0; echo \"openwork serve failed (attempt $attempt); retrying in 3s\"; sleep 3; done; exit 1", + ].join(" && ") + + const payload = { + type: "web_service", + name: serviceName, + ownerId: env.render.ownerId, + repo: env.render.workerRepo, + branch: env.render.workerBranch, + autoDeploy: "no", + rootDir: env.render.workerRootDir, + envVars: [ + { key: "OPENWORK_TOKEN", value: input.clientToken }, + { key: "OPENWORK_HOST_TOKEN", value: input.hostToken }, + { key: "DEN_WORKER_ID", value: input.workerId }, + ], + serviceDetails: { + runtime: "node", + plan: env.render.workerPlan, + region: env.render.workerRegion, + healthCheckPath: "/health", + 
envSpecificDetails: { + buildCommand, + startCommand, + }, + }, + } + + const created = await renderRequest<{ service: RenderService }>("/services", { + method: "POST", + body: JSON.stringify(payload), + }) + + const serviceId = created.service.id + await waitForDeployLive(serviceId) + const service = await renderRequest(`/services/${serviceId}`) + const renderUrl = service.serviceDetails?.url + + if (!renderUrl) { + throw new Error(`Render service ${serviceId} has no public URL`) + } + + await waitForHealth(renderUrl) + + const customUrl = await attachRenderCustomDomain( + serviceId, + input.workerId, + renderUrl, + ) + let url = renderUrl + + if (customUrl) { + try { + await waitForHealth(customUrl, env.render.customDomainReadyTimeoutMs) + url = customUrl + } catch { + console.warn( + `[provisioner] vanity domain not ready yet for ${input.workerId}; returning Render URL fallback`, + ) + } + } + + return { + provider: "render", + url, + status: "healthy", + region: service.serviceDetails?.region ?? env.render.workerRegion, + } +} + +export async function provisionWorker( + input: ProvisionInput, +): Promise { + if (env.provisionerMode === "render") { + return provisionWorkerOnRender(input) + } + + if (env.provisionerMode === "daytona") { + return provisionWorkerOnDaytona(input) + } + + const template = env.workerUrlTemplate ?? 
"https://workers.local/{workerId}" + const url = template.replace("{workerId}", input.workerId) + return { + provider: "stub", + url, + status: "provisioning", + } +} + +export async function deprovisionWorker(input: { + workerId: WorkerId + instanceUrl: string | null +}) { + if (env.provisionerMode === "daytona") { + await deprovisionWorkerOnDaytona(input.workerId) + return + } + + if (env.provisionerMode !== "render") { + return + } + + assertRenderConfig() + + const targetHost = hostFromUrl(input.instanceUrl) + const workerHint = input.workerId.slice(0, 8).toLowerCase() + + const services = await listRenderServices() + + const target = + services.find((service) => { + if (service.name?.toLowerCase().includes(workerHint)) { + return true + } + + if ( + targetHost && + hostFromUrl(service.serviceDetails?.url) === targetHost + ) { + return true + } + + return false + }) ?? null + + if (!target) { + return + } + + try { + await renderRequest(`/services/${target.id}/suspend`, { + method: "POST", + body: JSON.stringify({}), + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : "unknown_error" + console.warn( + `[provisioner] failed to suspend Render service ${target.id}: ${message}`, + ) + } +} diff --git a/ee/apps/den-api/src/workers/vanity-domain.ts b/ee/apps/den-api/src/workers/vanity-domain.ts new file mode 100644 index 0000000000..13e675f4f9 --- /dev/null +++ b/ee/apps/den-api/src/workers/vanity-domain.ts @@ -0,0 +1,183 @@ +function normalizeUrl(value: string): string { + return value.trim().replace(/\/+$/, "") +} + +function slug(value: string) { + return value + .toLowerCase() + .replace(/[^a-z0-9-]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, "") +} + +function splitHostname(hostname: string, domain: string): string | null { + const normalizedHost = hostname.trim().toLowerCase() + const normalizedDomain = domain.trim().toLowerCase() + if (!normalizedHost || !normalizedDomain) { + return null + } + + if (normalizedHost === normalizedDomain) { + return "" + } + + if (!normalizedHost.endsWith(`.${normalizedDomain}`)) { + return null + } + + return normalizedHost.slice(0, -(normalizedDomain.length + 1)) +} + +function hostFromUrl(value: string): string | null { + try { + return new URL(normalizeUrl(value)).host.toLowerCase() + } catch { + return null + } +} + +function withVercelScope(url: URL, teamId?: string, teamSlug?: string) { + if (teamId?.trim()) { + url.searchParams.set("teamId", teamId.trim()) + } else if (teamSlug?.trim()) { + url.searchParams.set("slug", teamSlug.trim()) + } + return url +} + +type VercelDnsRecord = { + id: string + type?: string + name?: string + value?: string +} + +async function vercelRequest(input: { + apiBase: string + token: string + path: string + teamId?: string + teamSlug?: string + method?: "GET" | "POST" | "PATCH" + body?: unknown +}): Promise { + const base = normalizeUrl(input.apiBase || "https://api.vercel.com") + const url = withVercelScope(new URL(`${base}${input.path}`), input.teamId, input.teamSlug) + const headers = new Headers({ + Authorization: `Bearer 
${input.token}`, + Accept: "application/json", + }) + + const init: RequestInit = { + method: input.method ?? "GET", + headers, + } + + if (typeof input.body !== "undefined") { + headers.set("Content-Type", "application/json") + init.body = JSON.stringify(input.body) + } + + const response = await fetch(url, init) + const text = await response.text() + + if (!response.ok) { + throw new Error(`Vercel API ${input.path} failed (${response.status}): ${text.slice(0, 300)}`) + } + + if (!text) { + return null as T + } + + return JSON.parse(text) as T +} + +export function customDomainForWorker(workerId: string, suffix: string | null | undefined): string | null { + const normalizedSuffix = suffix?.trim().toLowerCase() + if (!normalizedSuffix) { + return null + } + + const label = slug(workerId).slice(0, 32) + if (!label) { + return null + } + + return `${label}.${normalizedSuffix}` +} + +export async function ensureVercelDnsRecord(input: { + hostname: string + targetUrl: string + domain: string | null | undefined + apiBase?: string + token?: string + teamId?: string + teamSlug?: string +}): Promise { + const domain = input.domain?.trim().toLowerCase() + const token = input.token?.trim() + if (!domain || !token) { + return false + } + + const name = splitHostname(input.hostname, domain) + const targetHost = hostFromUrl(input.targetUrl) + if (name === null || !targetHost) { + return false + } + + const list = await vercelRequest<{ records?: VercelDnsRecord[] }>({ + apiBase: input.apiBase ?? "https://api.vercel.com", + token, + teamId: input.teamId, + teamSlug: input.teamSlug, + path: `/v4/domains/${encodeURIComponent(domain)}/records`, + }) + + const records = Array.isArray(list.records) ? list.records : [] + const current = records.find((record) => { + if (!record?.id) { + return false + } + if ((record.type ?? "").toUpperCase() !== "CNAME") { + return false + } + return (record.name ?? "") === name + }) + + if (current && (current.value ?? 
"").toLowerCase() === targetHost.toLowerCase()) { + return true + } + + const payload = { + name, + type: "CNAME", + value: targetHost, + } + + if (current?.id) { + await vercelRequest({ + apiBase: input.apiBase ?? "https://api.vercel.com", + token, + teamId: input.teamId, + teamSlug: input.teamSlug, + method: "PATCH", + path: `/v4/domains/${encodeURIComponent(domain)}/records/${encodeURIComponent(current.id)}`, + body: payload, + }) + return true + } + + await vercelRequest({ + apiBase: input.apiBase ?? "https://api.vercel.com", + token, + teamId: input.teamId, + teamSlug: input.teamSlug, + method: "POST", + path: `/v4/domains/${encodeURIComponent(domain)}/records`, + body: payload, + }) + + return true +} diff --git a/ee/apps/den-api/test/connector-cleanup.test.ts b/ee/apps/den-api/test/connector-cleanup.test.ts new file mode 100644 index 0000000000..735575d3ec --- /dev/null +++ b/ee/apps/den-api/test/connector-cleanup.test.ts @@ -0,0 +1,74 @@ +import { describe, expect, test } from "bun:test" +import { planConnectorImportedResourceCleanup } from "../src/routes/org/plugin-system/connector-cleanup.js" + +describe("connector cleanup planning", () => { + test("deletes connector-owned plugins and marketplaces when only connector memberships remain", () => { + const result = planConnectorImportedResourceCleanup({ + activeMarketplaceMemberships: [ + { marketplaceId: "marketplace_1", membershipSource: "connector", pluginId: "plugin_1" }, + ], + activeMappingPluginIds: [], + activePluginMembershipPluginIds: [], + candidateMarketplaceIds: ["marketplace_1"], + candidatePluginIds: ["plugin_1"], + }) + + expect(result).toEqual({ + marketplaceIdsToDelete: ["marketplace_1"], + pluginIdsToDelete: ["plugin_1"], + }) + }) + + test("keeps plugins alive when active imported objects remain", () => { + const result = planConnectorImportedResourceCleanup({ + activeMarketplaceMemberships: [ + { marketplaceId: "marketplace_1", membershipSource: "connector", pluginId: "plugin_1" }, + ], 
+ activeMappingPluginIds: [], + activePluginMembershipPluginIds: ["plugin_1"], + candidateMarketplaceIds: ["marketplace_1"], + candidatePluginIds: ["plugin_1"], + }) + + expect(result).toEqual({ + marketplaceIdsToDelete: [], + pluginIdsToDelete: [], + }) + }) + + test("deletes connector-owned sibling plugins through the marketplace tree", () => { + const result = planConnectorImportedResourceCleanup({ + activeMarketplaceMemberships: [ + { marketplaceId: "marketplace_1", membershipSource: "connector", pluginId: "plugin_1" }, + { marketplaceId: "marketplace_1", membershipSource: "connector", pluginId: "plugin_2" }, + ], + activeMappingPluginIds: [], + activePluginMembershipPluginIds: [], + candidateMarketplaceIds: ["marketplace_1"], + candidatePluginIds: ["plugin_1", "plugin_2"], + }) + + expect(result).toEqual({ + marketplaceIdsToDelete: ["marketplace_1"], + pluginIdsToDelete: ["plugin_1", "plugin_2"], + }) + }) + + test("keeps plugins alive when they still have non-connector marketplace dependencies", () => { + const result = planConnectorImportedResourceCleanup({ + activeMarketplaceMemberships: [ + { marketplaceId: "marketplace_1", membershipSource: "connector", pluginId: "plugin_1" }, + { marketplaceId: "marketplace_2", membershipSource: "manual", pluginId: "plugin_1" }, + ], + activeMappingPluginIds: [], + activePluginMembershipPluginIds: [], + candidateMarketplaceIds: ["marketplace_1"], + candidatePluginIds: ["plugin_1"], + }) + + expect(result).toEqual({ + marketplaceIdsToDelete: [], + pluginIdsToDelete: [], + }) + }) +}) diff --git a/ee/apps/den-api/test/github-connector-app.test.ts b/ee/apps/den-api/test/github-connector-app.test.ts new file mode 100644 index 0000000000..b488485eab --- /dev/null +++ b/ee/apps/den-api/test/github-connector-app.test.ts @@ -0,0 +1,180 @@ +import { describe, expect, test } from "bun:test" +import { generateKeyPairSync } from "node:crypto" +import { + buildGithubAppInstallUrl, + createGithubInstallStateToken, + 
createGithubAppJwt, + getGithubAppSummary, + getGithubConnectorAppConfig, + getGithubInstallationSummary, + listGithubInstallationRepositories, + normalizeGithubPrivateKey, + validateGithubInstallationTarget, + verifyGithubInstallStateToken, +} from "../src/routes/org/plugin-system/github-app.js" + +const { privateKey } = generateKeyPairSync("rsa", { modulusLength: 2048 }) +const privateKeyPem = privateKey.export({ format: "pem", type: "pkcs8" }).toString() + +describe("github connector app helpers", () => { + test("normalizes escaped private keys and produces a signed app JWT", () => { + const escapedKey = privateKeyPem.replace(/\n/g, "\\n") + expect(normalizeGithubPrivateKey(escapedKey)).toBe(privateKeyPem) + + const config = getGithubConnectorAppConfig({ + appId: "123456", + privateKey: escapedKey, + }) + const jwt = createGithubAppJwt({ ...config, now: new Date("2026-04-21T19:00:00.000Z") }) + const [headerSegment, payloadSegment, signatureSegment] = jwt.split(".") + + expect(signatureSegment.length).toBeGreaterThan(0) + expect(JSON.parse(Buffer.from(headerSegment, "base64url").toString("utf8"))).toEqual({ alg: "RS256", typ: "JWT" }) + expect(JSON.parse(Buffer.from(payloadSegment, "base64url").toString("utf8"))).toMatchObject({ + iss: "123456", + }) + }) + + test("lists repositories through the GitHub installation token flow", async () => { + const requests: Array<{ method: string; url: string }> = [] + const repositories = await listGithubInstallationRepositories({ + config: { appId: "123456", privateKey: privateKeyPem }, + fetchFn: async (url, init) => { + requests.push({ + method: init?.method ?? 
"GET", + url: String(url), + }) + + if (String(url).endsWith("/access_tokens")) { + return new Response(JSON.stringify({ token: "installation-token" }), { status: 201 }) + } + + if (String(url).endsWith("/contents/.claude-plugin/marketplace.json")) { + if (String(url).includes("different-ai/openwork")) { + const content = Buffer.from(JSON.stringify({ plugins: [{ name: "a" }, { name: "b" }, { name: "c" }] })).toString("base64") + return new Response(JSON.stringify({ content, encoding: "base64" }), { status: 200 }) + } + return new Response(JSON.stringify({ message: "not found" }), { status: 404 }) + } + + if (String(url).endsWith("/contents/.claude-plugin/plugin.json")) { + if (String(url).includes("different-ai/opencode")) { + return new Response(JSON.stringify({ name: "plugin.json" }), { status: 200 }) + } + return new Response(JSON.stringify({ message: "not found" }), { status: 404 }) + } + + return new Response(JSON.stringify({ + repositories: [ + { default_branch: "main", full_name: "different-ai/openwork", id: 42, private: true }, + { default_branch: "dev", full_name: "different-ai/opencode", id: 99, private: false }, + ], + }), { status: 200 }) + }, + installationId: 777, + }) + + expect(requests.map((request) => request.url)).toEqual([ + "https://api.github.com/app/installations/777/access_tokens", + "https://api.github.com/installation/repositories", + "https://api.github.com/repos/different-ai/openwork/contents/.claude-plugin/marketplace.json", + "https://api.github.com/repos/different-ai/opencode/contents/.claude-plugin/marketplace.json", + "https://api.github.com/repos/different-ai/opencode/contents/.claude-plugin/plugin.json", + ]) + expect(repositories).toEqual([ + { defaultBranch: "main", fullName: "different-ai/openwork", hasPluginManifest: true, id: 42, manifestKind: "marketplace", marketplacePluginCount: 3, private: true }, + { defaultBranch: "dev", fullName: "different-ai/opencode", hasPluginManifest: true, id: 99, manifestKind: "plugin", 
marketplacePluginCount: null, private: false }, + ]) + }) + + test("builds install URLs and validates signed state tokens", async () => { + const app = await getGithubAppSummary({ + config: { appId: "123456", privateKey: privateKeyPem }, + fetchFn: async () => new Response(JSON.stringify({ + html_url: "https://github.com/apps/openwork-test", + name: "OpenWork Test", + slug: "openwork-test", + }), { status: 200 }), + }) + + const token = createGithubInstallStateToken({ + now: new Date("2026-04-21T19:00:00.000Z"), + orgId: "org_123", + returnPath: "/o/test-org/dashboard/integrations/github", + secret: "secret-123", + userId: "user_123", + }) + + expect(buildGithubAppInstallUrl({ app, state: token })).toBe(`https://github.com/apps/openwork-test/installations/new?state=${encodeURIComponent(token)}`) + expect(verifyGithubInstallStateToken({ now: new Date("2026-04-21T19:05:00.000Z"), secret: "secret-123", token })).toMatchObject({ + orgId: "org_123", + returnPath: "/o/test-org/dashboard/integrations/github", + userId: "user_123", + }) + expect(verifyGithubInstallStateToken({ now: new Date("2026-04-21T19:05:00.000Z"), secret: "wrong-secret", token })).toBeNull() + }) + + test("reads GitHub installation account details", async () => { + const installation = await getGithubInstallationSummary({ + config: { appId: "123456", privateKey: privateKeyPem }, + fetchFn: async (url) => { + if (String(url).endsWith("/app/installations/777")) { + return new Response(JSON.stringify({ + account: { + login: "different-ai", + type: "Organization", + }, + id: 777, + }), { status: 200 }) + } + return new Response(JSON.stringify({ message: "not found" }), { status: 404 }) + }, + installationId: 777, + }) + + expect(installation).toEqual({ + accountLogin: "different-ai", + accountType: "Organization", + displayName: "different-ai", + installationId: 777, + repositorySelection: "all", + settingsUrl: null, + }) + }) + + test("validates repository identity and branch existence against GitHub", 
async () => { + const result = await validateGithubInstallationTarget({ + branch: "main", + config: { appId: "123456", privateKey: privateKeyPem }, + fetchFn: async (url) => { + if (String(url).endsWith("/access_tokens")) { + return new Response(JSON.stringify({ token: "installation-token" }), { status: 201 }) + } + + if (String(url).endsWith("/repos/different-ai/openwork")) { + return new Response(JSON.stringify({ + default_branch: "main", + full_name: "different-ai/openwork", + id: 42, + }), { status: 200 }) + } + + if (String(url).endsWith("/repos/different-ai/openwork/branches/main")) { + return new Response(JSON.stringify({ name: "main" }), { status: 200 }) + } + + return new Response(JSON.stringify({ message: "not found" }), { status: 404 }) + }, + installationId: 777, + ref: "refs/heads/main", + repositoryFullName: "different-ai/openwork", + repositoryId: 42, + }) + + expect(result).toEqual({ + branchExists: true, + defaultBranch: "main", + repositoryAccessible: true, + }) + }) +}) diff --git a/ee/apps/den-api/test/github-discovery.test.ts b/ee/apps/den-api/test/github-discovery.test.ts new file mode 100644 index 0000000000..45c175095f --- /dev/null +++ b/ee/apps/den-api/test/github-discovery.test.ts @@ -0,0 +1,107 @@ +import { describe, expect, test } from "bun:test" +import { buildGithubRepoDiscovery, type GithubDiscoveryTreeEntry } from "../src/routes/org/plugin-system/github-discovery.js" + +function blob(path: string): GithubDiscoveryTreeEntry { + return { id: path, kind: "blob", path, sha: null, size: null } +} + +describe("github discovery", () => { + test("classifies marketplace repos and resolves local plugin roots", () => { + const result = buildGithubRepoDiscovery({ + entries: [ + blob(".claude-plugin/marketplace.json"), + blob("plugins/sales/.claude-plugin/plugin.json"), + blob("plugins/sales/skills/hello/SKILL.md"), + blob("plugins/sales/commands/deploy.md"), + ], + fileTextByPath: { + ".claude-plugin/marketplace.json": JSON.stringify({ + 
plugins: [ + { name: "sales", description: "Sales workflows", source: "./plugins/sales" }, + ], + }), + "plugins/sales/.claude-plugin/plugin.json": JSON.stringify({ + name: "sales", + description: "Sales plugin", + }), + }, + }) + + expect(result.classification).toBe("claude_marketplace_repo") + expect(result.discoveredPlugins).toHaveLength(1) + expect(result.discoveredPlugins[0]).toMatchObject({ + displayName: "sales", + rootPath: "plugins/sales", + sourceKind: "marketplace_entry", + }) + expect(result.discoveredPlugins[0]?.componentPaths.skills).toEqual(["plugins/sales/skills"]) + expect(result.discoveredPlugins[0]?.componentPaths.commands).toEqual(["plugins/sales/commands"]) + }) + + test("treats marketplace source './' as the current repo root", () => { + const result = buildGithubRepoDiscovery({ + entries: [ + blob(".claude-plugin/marketplace.json"), + blob("skills/agent-browser/SKILL.md"), + blob("skills/other-skill/SKILL.md"), + ], + fileTextByPath: { + ".claude-plugin/marketplace.json": JSON.stringify({ + plugins: [ + { + name: "agent-browser", + description: "Automates browser interactions for web testing, form filling, screenshots, and data extraction", + source: "./", + strict: false, + skills: ["./skills/agent-browser"], + category: "development", + }, + ], + }), + }, + }) + + expect(result.classification).toBe("claude_marketplace_repo") + expect(result.warnings).toEqual([]) + expect(result.discoveredPlugins).toHaveLength(1) + expect(result.discoveredPlugins[0]).toMatchObject({ + displayName: "agent-browser", + rootPath: "", + sourceKind: "marketplace_entry", + supported: true, + }) + expect(result.discoveredPlugins[0]?.componentPaths.skills).toEqual(["skills/agent-browser"]) + }) + + test("treats non-Claude folder-only repos as unsupported", () => { + const result = buildGithubRepoDiscovery({ + entries: [ + blob("Sales/skills/pitch/SKILL.md"), + blob("Sales/commands/release.md"), + blob("finance/agents/reviewer.md"), + 
blob("finance/commands/audit.md"), + ], + fileTextByPath: { + "Sales/plugin.json": JSON.stringify({ name: "Sales", description: "Sales tools" }), + }, + }) + + expect(result.classification).toBe("unsupported") + expect(result.discoveredPlugins).toEqual([]) + expect(result.warnings[0]).toContain("only supports Claude-compatible plugins and marketplaces") + }) + + test("treats standalone .claude directories as unsupported without plugin manifests", () => { + const result = buildGithubRepoDiscovery({ + entries: [ + blob(".claude/skills/research/SKILL.md"), + blob(".claude/commands/publish.md"), + ], + fileTextByPath: {}, + }) + + expect(result.classification).toBe("unsupported") + expect(result.discoveredPlugins).toEqual([]) + expect(result.warnings[0]).toContain("only supports Claude-compatible plugins and marketplaces") + }) +}) diff --git a/ee/apps/den-api/test/github-webhook.test.ts b/ee/apps/den-api/test/github-webhook.test.ts new file mode 100644 index 0000000000..d2cbce69ca --- /dev/null +++ b/ee/apps/den-api/test/github-webhook.test.ts @@ -0,0 +1,91 @@ +import { afterEach, beforeAll, expect, test } from "bun:test" +import { Hono } from "hono" + +function seedRequiredEnv() { + process.env.DATABASE_URL = process.env.DATABASE_URL ?? "mysql://root:password@127.0.0.1:3306/openwork_test" + process.env.DEN_DB_ENCRYPTION_KEY = process.env.DEN_DB_ENCRYPTION_KEY ?? "x".repeat(32) + process.env.BETTER_AUTH_SECRET = process.env.BETTER_AUTH_SECRET ?? "y".repeat(32) + process.env.BETTER_AUTH_URL = process.env.BETTER_AUTH_URL ?? 
"http://127.0.0.1:8790" +} + +let envModule: typeof import("../src/env.js") +let githubModule: typeof import("../src/routes/webhooks/github.js") + +beforeAll(async () => { + seedRequiredEnv() + envModule = await import("../src/env.js") + githubModule = await import("../src/routes/webhooks/github.js") +}) + +afterEach(() => { + envModule.env.githubConnectorApp.webhookSecret = "super-secret" +}) + +function createWebhookApp() { + const app = new Hono() + githubModule.registerGithubWebhookRoutes(app) + return app +} + +test("webhook route rejects invalid signatures before JSON parsing", async () => { + envModule.env.githubConnectorApp.webhookSecret = "super-secret" + const app = createWebhookApp() + const response = await app.request("http://den.local/v1/webhooks/connectors/github", { + body: "{", + headers: { + "x-github-delivery": "delivery-1", + "x-github-event": "push", + "x-hub-signature-256": "sha256=wrong", + }, + method: "POST", + }) + + expect(response.status).toBe(401) + await expect(response.json()).resolves.toEqual({ ok: false, error: "invalid signature" }) +}) + +test("webhook route returns 503 when the GitHub webhook secret is unset", async () => { + envModule.env.githubConnectorApp.webhookSecret = undefined + const app = createWebhookApp() + const response = await app.request("http://den.local/v1/webhooks/connectors/github", { + body: "{}", + headers: { + "x-github-delivery": "delivery-2", + "x-github-event": "push", + "x-hub-signature-256": "sha256=unused", + }, + method: "POST", + }) + + expect(response.status).toBe(503) +}) + +test("webhook route accepts a valid signature and ignores unbound deliveries cleanly", async () => { + envModule.env.githubConnectorApp.webhookSecret = "super-secret" + const app = createWebhookApp() + const payload = JSON.stringify({ + after: "abc123", + ref: "refs/heads/main", + repository: { + full_name: "different-ai/openwork", + id: 42, + }, + }) + + const response = await 
app.request("http://den.local/v1/webhooks/connectors/github", { + body: payload, + headers: { + "x-github-delivery": "delivery-3", + "x-github-event": "push", + "x-hub-signature-256": githubModule.signGithubBody(payload, "super-secret"), + }, + method: "POST", + }) + + expect(response.status).toBe(200) + await expect(response.json()).resolves.toEqual({ + ok: true, + accepted: false, + reason: "missing installation id", + }) +}) diff --git a/ee/apps/den-api/test/org-invitations.test.ts b/ee/apps/den-api/test/org-invitations.test.ts new file mode 100644 index 0000000000..29b4096f56 --- /dev/null +++ b/ee/apps/den-api/test/org-invitations.test.ts @@ -0,0 +1,95 @@ +import { beforeAll, expect, test } from "bun:test" +import { Hono } from "hono" + +function seedRequiredEnv() { + process.env.DATABASE_URL = process.env.DATABASE_URL ?? "mysql://root:password@127.0.0.1:3306/openwork_test" + process.env.DEN_DB_ENCRYPTION_KEY = process.env.DEN_DB_ENCRYPTION_KEY ?? "x".repeat(32) + process.env.BETTER_AUTH_SECRET = process.env.BETTER_AUTH_SECRET ?? "y".repeat(32) + process.env.BETTER_AUTH_URL = process.env.BETTER_AUTH_URL ?? "http://127.0.0.1:8790" + process.env.CORS_ORIGINS = process.env.CORS_ORIGINS ?? 
"http://127.0.0.1:8790" +} + +let invitationModule: typeof import("../src/routes/org/invitations.js") +let orgRoutesModule: typeof import("../src/routes/org/index.js") +let userOrganizationsModule: typeof import("../src/middleware/user-organizations.js") + +beforeAll(async () => { + seedRequiredEnv() + invitationModule = await import("../src/routes/org/invitations.js") + orgRoutesModule = await import("../src/routes/org/index.js") + userOrganizationsModule = await import("../src/middleware/user-organizations.js") +}) + +function createOrgApp() { + const app = new Hono() + orgRoutesModule.registerOrgRoutes(app) + return app +} + +test("legacy org-scoped paths proxy into the unscoped handlers", async () => { + const app = createOrgApp() + const response = await app.request("http://den.local/v1/orgs/org_123/invitations", { + body: JSON.stringify({ email: "teammate@example.com", role: "admin" }), + headers: { + "content-type": "application/json", + }, + method: "POST", + }) + + expect(response.status).toBe(401) + await expect(response.json()).resolves.toEqual({ error: "unauthorized" }) +}) + +test("legacy org-scoped proxy also reaches non-invitation org resources", async () => { + const app = createOrgApp() + const response = await app.request("http://den.local/v1/orgs/org_123/teams", { + body: JSON.stringify({ memberIds: [], name: "Legacy Team" }), + headers: { + "content-type": "application/json", + }, + method: "POST", + }) + + expect(response.status).toBe(401) + await expect(response.json()).resolves.toEqual({ error: "unauthorized" }) +}) + +test("current org endpoints are not swallowed by the legacy proxy", async () => { + const app = createOrgApp() + const response = await app.request("http://den.local/v1/orgs/invitations/preview?id=bad", { + method: "GET", + }) + + expect(response.status).toBe(400) +}) + +test("invitation cancel still validates against the unscoped handler", async () => { + const app = new Hono() + 
invitationModule.registerOrgInvitationRoutes(app) + const response = await app.request("http://den.local/v1/invitations/invitation_123/cancel", { + method: "POST", + }) + + expect(response.status).toBe(401) + await expect(response.json()).resolves.toEqual({ error: "unauthorized" }) +}) + +test("session hydration only runs when a user session is missing an active organization", () => { + expect(userOrganizationsModule.shouldHydrateSessionActiveOrganization({ + scopedOrganizationId: null, + sessionActiveOrganizationId: null, + resolvedActiveOrganizationId: "organization_first", + })).toBe(true) + + expect(userOrganizationsModule.shouldHydrateSessionActiveOrganization({ + scopedOrganizationId: null, + sessionActiveOrganizationId: "organization_existing", + resolvedActiveOrganizationId: "organization_existing", + })).toBe(false) + + expect(userOrganizationsModule.shouldHydrateSessionActiveOrganization({ + scopedOrganizationId: "organization_scoped", + sessionActiveOrganizationId: null, + resolvedActiveOrganizationId: "organization_scoped", + })).toBe(false) +}) diff --git a/ee/apps/den-api/test/plugin-system-access.test.ts b/ee/apps/den-api/test/plugin-system-access.test.ts new file mode 100644 index 0000000000..ad68e2f545 --- /dev/null +++ b/ee/apps/den-api/test/plugin-system-access.test.ts @@ -0,0 +1,91 @@ +import { beforeAll, expect, test } from "bun:test" + +function seedRequiredEnv() { + process.env.DATABASE_URL = process.env.DATABASE_URL ?? "mysql://root:password@127.0.0.1:3306/openwork_test" + process.env.DEN_DB_ENCRYPTION_KEY = process.env.DEN_DB_ENCRYPTION_KEY ?? "x".repeat(32) + process.env.BETTER_AUTH_SECRET = process.env.BETTER_AUTH_SECRET ?? "y".repeat(32) + process.env.BETTER_AUTH_URL = process.env.BETTER_AUTH_URL ?? 
"http://127.0.0.1:8790" +} + +let accessModule: typeof import("../src/routes/org/plugin-system/access.js") + +beforeAll(async () => { + seedRequiredEnv() + accessModule = await import("../src/routes/org/plugin-system/access.js") +}) + +function createActorContext(input?: { isOwner?: boolean; role?: string; teamIds?: string[] }) { + return { + memberTeams: (input?.teamIds ?? []).map((teamId) => ({ + createdAt: new Date("2026-04-17T00:00:00.000Z"), + id: teamId, + name: teamId, + organizationId: "org_test", + updatedAt: new Date("2026-04-17T00:00:00.000Z"), + })), + organizationContext: { + currentMember: { + createdAt: new Date("2026-04-17T00:00:00.000Z"), + id: "member_current", + isOwner: input?.isOwner ?? false, + role: input?.role ?? "member", + userId: "user_current", + }, + }, + } as any +} + +test("org owners and admins get plugin-system capability access", () => { + expect(accessModule.isPluginArchOrgAdmin(createActorContext({ isOwner: true }))).toBe(true) + expect(accessModule.isPluginArchOrgAdmin(createActorContext({ role: "member,admin" }))).toBe(true) + expect(accessModule.isPluginArchOrgAdmin(createActorContext({ role: "member" }))).toBe(false) + + expect(accessModule.hasPluginArchCapability(createActorContext({ isOwner: true }), "plugin.create")).toBe(true) + expect(accessModule.hasPluginArchCapability(createActorContext({ role: "admin" }), "marketplace.create")).toBe(true) + expect(accessModule.hasPluginArchCapability(createActorContext({ role: "admin" }), "connector_instance.create")).toBe(true) + expect(accessModule.hasPluginArchCapability(createActorContext({ role: "member" }), "config_object.create")).toBe(false) +}) + +test("grant resolution supports direct, team, org-wide, and highest-role precedence", () => { + const grants = [ + { + orgMembershipId: null, + orgWide: true, + removedAt: null, + role: "viewer", + teamId: null, + }, + { + orgMembershipId: null, + orgWide: false, + removedAt: null, + role: "editor", + teamId: "team_alpha", + }, + { 
+ orgMembershipId: "member_current", + orgWide: false, + removedAt: null, + role: "manager", + teamId: null, + }, + ] as const + + expect(accessModule.resolvePluginArchGrantRole({ grants: [...grants], memberId: "member_current", teamIds: ["team_alpha"] })).toBe("manager") + expect(accessModule.resolvePluginArchGrantRole({ grants: [...grants], memberId: "other_member", teamIds: ["team_alpha"] })).toBe("editor") + expect(accessModule.resolvePluginArchGrantRole({ grants: [...grants], memberId: "other_member", teamIds: [] })).toBe("viewer") +}) + +test("removed grants are ignored during resolution", () => { + expect(accessModule.resolvePluginArchGrantRole({ + grants: [{ + orgMembershipId: "member_current", + orgWide: false, + removedAt: new Date("2026-04-17T00:00:00.000Z"), + role: "manager", + teamId: null, + }], + memberId: "member_current", + teamIds: [], + })).toBeNull() +}) diff --git a/ee/apps/den-api/tsconfig.json b/ee/apps/den-api/tsconfig.json new file mode 100644 index 0000000000..21f5aeef55 --- /dev/null +++ b/ee/apps/den-api/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "rootDir": "src", + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["src"] +} diff --git a/ee/apps/den-controller/README.md b/ee/apps/den-controller/README.md new file mode 100644 index 0000000000..ccf5839a1e --- /dev/null +++ b/ee/apps/den-controller/README.md @@ -0,0 +1,13 @@ +# Deprecated: `den-controller` + +`ee/apps/den-controller` has been replaced by `ee/apps/den-api` (formerly `den-controller`). + +Use `ee/apps/den-api` for: + +- auth endpoints +- org endpoints +- admin endpoints +- worker endpoints +- local and production Den control-plane work + +This directory is intentionally kept as a stub so older links resolve to a clear migration note. 
diff --git a/ee/apps/den-web/.gitignore b/ee/apps/den-web/.gitignore new file mode 100644 index 0000000000..272e211607 --- /dev/null +++ b/ee/apps/den-web/.gitignore @@ -0,0 +1,4 @@ +.vercel +.env +.env.* +.env*.local diff --git a/ee/apps/den-web/README.md b/ee/apps/den-web/README.md new file mode 100644 index 0000000000..f9f091d7e4 --- /dev/null +++ b/ee/apps/den-web/README.md @@ -0,0 +1,65 @@ +# OpenWork Cloud App (`ee/apps/den-web`) + +Frontend for `app.openworklabs.com`. + +## What it does + +- Signs up / signs in users against Den service auth. +- Handles invited-org signup flows where the invited email stays locked and the user verifies access before joining. +- Launches cloud workers via `POST /v1/workers`. +- Handles paywall responses (`402 payment_required`), routes users through Polar checkout, and only enables worker launch after purchase. +- Offers desktop handoff actions so users can open the generated worker directly in OpenWork or copy the connect credentials manually. +- Uses a Next.js proxy route (`/api/den/*`) to reach `api.openworklabs.com` without browser CORS issues. +- Uses a same-origin auth proxy (`/api/auth/*`) so GitHub OAuth callbacks can land on `app.openworklabs.com`. + +## Current hosted user flow + +1. Sign in with a standard provider or accept an org invite. +2. If the org requires billing, complete checkout before launching a worker. +3. Launch the worker from the cloud dashboard. +4. Open the worker in the desktop app with the provided deep link, or copy the URL/token into `Connect remote` manually. + +## Local development + +1. Install workspace deps from repo root: + `pnpm install` +2. Run the app: + `pnpm --filter @openwork-ee/den-web dev` +3. Open: + `http://localhost:3005` + +### Optional env vars + +- `DEN_API_BASE` (server-only): upstream API base used by proxy route. 
+ - default: `https://api.openworklabs.com` +- `DEN_AUTH_ORIGIN` (server-only): Origin header sent to Better Auth endpoints when the browser request does not include one. + - default: `https://app.openworklabs.com` +- `DEN_AUTH_FALLBACK_BASE` (server-only): fallback Den origin used if `DEN_API_BASE` serves an HTML/5xx error. + - default: `https://den-control-plane-openwork.onrender.com` +- `NEXT_PUBLIC_OPENWORK_APP_CONNECT_URL` (client): Base URL for "Open in App" links. + - Example: `https://openworklabs.com/app` + - The web panel appends `/connect-remote` and injects worker URL/token params automatically. +- `NEXT_PUBLIC_OPENWORK_AUTH_CALLBACK_URL` (client): Canonical URL used for GitHub auth callback redirects. + - default: `https://app.openworklabs.com` + - this host must serve `/api/auth/*`; the included proxy route does that +- `NEXT_PUBLIC_POSTHOG_KEY` (client): PostHog project key used for Den analytics. + - optional override; defaults to the same project key used by `ee/apps/landing` +- `NEXT_PUBLIC_POSTHOG_HOST` (client): PostHog ingest host or same-origin proxy path. + - default: `/ow` + - set it to `https://us.i.posthog.com` to bypass the local proxy + +## Deploy on Vercel + +Recommended project settings: + +- Root directory: `ee/apps/den-web` +- Framework preset: Next.js +- Build command: `cd ../../.. && pnpm --filter @openwork-ee/den-web build` +- Output directory: `.next` +- Install command: `cd ../../.. && pnpm install --frozen-lockfile` + +These commands should be configured in the Vercel dashboard rather than committed in `vercel.json`, so the app still builds from the monorepo root and can resolve shared workspace packages like `@openwork-ee/utils`. 
+ +Then assign custom domain: + +- `app.openworklabs.com` diff --git a/ee/apps/den-web/app/(den)/_components/auth-panel.tsx b/ee/apps/den-web/app/(den)/_components/auth-panel.tsx new file mode 100644 index 0000000000..493702824c --- /dev/null +++ b/ee/apps/den-web/app/(den)/_components/auth-panel.tsx @@ -0,0 +1,393 @@ +"use client"; + +import { ArrowRight, CheckCircle2 } from "lucide-react"; +import { usePathname, useRouter } from "next/navigation"; +import { useEffect, useRef, useState, type ReactNode } from "react"; +import { isSamePathname } from "../_lib/client-route"; +import type { AuthMode } from "../_lib/den-flow"; +import { getMcpOAuthSelectOrganizationRoute } from "../_lib/mcp-oauth-route"; +import { useDenFlow } from "../_providers/den-flow-provider"; + +type PanelContent = { + title: string; + copy: string; + submitLabel: string; + togglePrompt?: string; + toggleActionLabel?: string; +}; + +function getDesktopGrant(url: string | null) { + if (!url) return null; + try { + const parsed = new URL(url); + const grant = parsed.searchParams.get("grant")?.trim() ?? 
""; + return grant || null; + } catch { + return null; + } +} + +function GitHubLogo() { + return ( + + ); +} + +function GoogleLogo() { + return ( + + ); +} + +function SocialButton({ + children, + onClick, + disabled, +}: { + children: ReactNode; + onClick: () => void; + disabled: boolean; +}) { + return ( + + ); +} + +export function AuthPanel({ + prefilledEmail, + prefillKey, + initialMode = "sign-up", + lockEmail = false, + hideSocialAuth = false, + hideEmailField = false, + eyebrow = "Account", + signUpContent, + signInContent, + verificationContent, +}: { + prefilledEmail?: string; + prefillKey?: string; + initialMode?: AuthMode; + lockEmail?: boolean; + hideSocialAuth?: boolean; + hideEmailField?: boolean; + eyebrow?: string; + signUpContent?: Partial; + signInContent?: Partial; + verificationContent?: Partial; +}) { + const router = useRouter(); + const pathname = usePathname(); + const prefillRef = useRef(null); + const [copiedDesktopField, setCopiedDesktopField] = useState<"link" | "code" | null>(null); + const { + authMode, + setAuthMode, + email, + setEmail, + password, + setPassword, + verificationCode, + setVerificationCode, + verificationRequired, + authBusy, + authInfo, + authError, + desktopAuthRequested, + desktopRedirectUrl, + desktopRedirectBusy, + showAuthFeedback, + submitAuth, + submitVerificationCode, + resendVerificationCode, + cancelVerification, + beginSocialAuth, + resolveUserLandingRoute, + } = useDenFlow(); + + const resolvedSignUpContent: PanelContent = { + title: "Get started.", + copy: "Free to try. 
Team plans from $50/mo.", + submitLabel: "Create account", + togglePrompt: "Have an account?", + toggleActionLabel: "Sign in", + ...signUpContent, + }; + + const resolvedSignInContent: PanelContent = { + title: "Welcome back.", + copy: "Sign in to open your team workspace.", + submitLabel: "Sign in", + togglePrompt: "Need an account?", + toggleActionLabel: "Create one", + ...signInContent, + }; + + const resolvedVerificationContent: PanelContent = { + title: "Verify your email.", + copy: "Enter the six-digit code from your inbox.", + submitLabel: "Verify email", + ...verificationContent, + }; + + const desktopGrant = getDesktopGrant(desktopRedirectUrl); + const activeContent = verificationRequired + ? resolvedVerificationContent + : authMode === "sign-in" + ? resolvedSignInContent + : resolvedSignUpContent; + const showLockedEmailSummary = Boolean(prefilledEmail && lockEmail && hideEmailField); + + useEffect(() => { + const key = prefillKey ?? prefilledEmail?.trim() ?? null; + if (!key || prefillRef.current === key) { + return; + } + + prefillRef.current = key; + setAuthMode(initialMode); + setEmail(prefilledEmail?.trim() ?? ""); + setPassword(""); + setVerificationCode(""); + }, [initialMode, prefillKey, prefilledEmail, setAuthMode, setEmail, setPassword, setVerificationCode]); + + const copyDesktopValue = async (field: "link" | "code", value: string | null) => { + if (!value) return; + await navigator.clipboard.writeText(value); + setCopiedDesktopField(field); + window.setTimeout(() => { + setCopiedDesktopField((current) => (current === field ? null : current)); + }, 1800); + }; + + return ( +
+
+

{eyebrow}

+
+

{activeContent.title}

+

{activeContent.copy}

+
+
+ + {desktopAuthRequested ? ( +
+

Finish sign-in here, then jump back into the OpenWork desktop app.

+ {desktopRedirectUrl ? ( +
+
+ + + {desktopGrant ? ( + + ) : null} +
+

+ If OpenWork does not open automatically, copy the sign-in link or one-time code and paste it into the OpenWork desktop app. +

+
+ ) : null} +
+ ) : null} + +
{ + const next = verificationRequired + ? await submitVerificationCode(event) + : await submitAuth(event); + const oauthRoute = typeof window === "undefined" ? null : getMcpOAuthSelectOrganizationRoute(window.location.search); + if (next && oauthRoute) { + router.replace(oauthRoute); + return; + } + if (next === "dashboard" || next === "join-org") { + const target = await resolveUserLandingRoute(); + if (target && !isSamePathname(pathname, target)) { + router.replace(target); + } + } else if (next === "checkout" && !isSamePathname(pathname, "/checkout")) { + router.replace("/checkout"); + } + }} + > + {!verificationRequired && !hideSocialAuth ? ( + <> + void beginSocialAuth("github")} + disabled={authBusy || desktopRedirectBusy} + > + + Continue with GitHub + + + void beginSocialAuth("google")} + disabled={authBusy || desktopRedirectBusy} + > + + Continue with Google + + + + + ) : null} + + {showLockedEmailSummary ? ( +
+

Invited email

+

{prefilledEmail}

+
+ ) : null} + + {!hideEmailField ? ( + + ) : null} + + {!verificationRequired ? ( + + ) : ( + + )} + + + + {verificationRequired ? ( +
+ + +
+ ) : null} +
+ + {!verificationRequired ? ( +
+

+ {authMode === "sign-in" + ? resolvedSignInContent.togglePrompt + : resolvedSignUpContent.togglePrompt} +

+ +
+ ) : null} + + {showAuthFeedback ? ( +
+

{authInfo}

+ {authError ?

{authError}

: null} + {!authError && verificationRequired ? ( +
+ + Waiting for your verification code +
+ ) : null} +
+ ) : null} +
+ ); +} diff --git a/ee/apps/den-web/app/(den)/_components/auth-screen.tsx b/ee/apps/den-web/app/(den)/_components/auth-screen.tsx new file mode 100644 index 0000000000..4bbb202ae5 --- /dev/null +++ b/ee/apps/den-web/app/(den)/_components/auth-screen.tsx @@ -0,0 +1,153 @@ +"use client"; + +import { PaperMeshGradient } from "@openwork/ui/react"; +import { Dithering } from "@paper-design/shaders-react"; +import { usePathname, useRouter } from "next/navigation"; +import { useEffect, useRef } from "react"; +import { isSamePathname } from "../_lib/client-route"; +import { getMcpOAuthSelectOrganizationRoute } from "../_lib/mcp-oauth-route"; +import { useDenFlow } from "../_providers/den-flow-provider"; +import { AuthPanel } from "./auth-panel"; + +function FeatureCard({ title, body }: { title: string; body: string }) { + return ( +
+

{title}

+

{body}

+
+ ); +} + +function LoadingPanel({ title, body }: { title: string; body: string }) { + return ( +
+
+

OpenWork Cloud

+

{title}

+

{body}

+
+
+
+
+
+ ); +} + +export function AuthScreen() { + const router = useRouter(); + const pathname = usePathname(); + const routingRef = useRef(false); + const { user, sessionHydrated, desktopAuthRequested, resolveUserLandingRoute } = useDenFlow(); + const hasResolvedSession = sessionHydrated && Boolean(user) && !desktopAuthRequested; + + useEffect(() => { + if (!hasResolvedSession || routingRef.current) { + return; + } + + const oauthRoute = typeof window === "undefined" ? null : getMcpOAuthSelectOrganizationRoute(window.location.search); + if (oauthRoute && !isSamePathname(pathname, oauthRoute)) { + router.replace(oauthRoute); + return; + } + + routingRef.current = true; + void resolveUserLandingRoute() + .then((target) => { + if (target && !isSamePathname(pathname, target)) { + router.replace(target); + } + }) + .finally(() => { + routingRef.current = false; + }); + }, [hasResolvedSession, pathname, resolveUserLandingRoute, router]); + + if (!sessionHydrated) { + return ( +
+ +
+ ); + } + + return ( +
+
+
+
+
+ + + +
+ +
+
+ OpenWork + OpenWork Cloud +
+ +
+ + OpenWork Cloud + +

+ One setup, every seat. +

+

+ Configure once. Your whole team gets the same tools, agents, and providers. +

+
+
+
+ +
+ + + +
+
+ +
+ {hasResolvedSession ? ( + + ) : ( + + )} +
+
+
+ ); +} diff --git a/ee/apps/den-web/app/(den)/_components/checkout-screen.tsx b/ee/apps/den-web/app/(den)/_components/checkout-screen.tsx new file mode 100644 index 0000000000..f0f4eed042 --- /dev/null +++ b/ee/apps/den-web/app/(den)/_components/checkout-screen.tsx @@ -0,0 +1,361 @@ +"use client"; + +import { usePathname, useRouter } from "next/navigation"; +import { useEffect, useRef, useState } from "react"; +import { isSamePathname } from "../_lib/client-route"; +import { formatMoneyMinor } from "../_lib/den-flow"; +import { useDenFlow } from "../_providers/den-flow-provider"; + +// For local layout testing (no deploy needed) +// Enable with: NEXT_PUBLIC_DEN_MOCK_BILLING=1 +const MOCK_BILLING = process.env.NEXT_PUBLIC_DEN_MOCK_BILLING === "1"; +const MOCK_CHECKOUT_URL = (process.env.NEXT_PUBLIC_DEN_MOCK_CHECKOUT_URL ?? "").trim() || null; + +function formatSubscriptionStatus(value: string | null | undefined) { + if (!value) return "Purchase required"; + return value + .split(/[_\s]+/) + .filter(Boolean) + .map((part) => part.slice(0, 1).toUpperCase() + part.slice(1).toLowerCase()) + .join(" "); +} + +function LoadingPanel({ title, body }: { title: string; body: string }) { + return ( +
+
+

{title}

+

{body}

+
+
+ ); +} + +export function CheckoutScreen({ customerSessionToken }: { customerSessionToken: string | null }) { + const router = useRouter(); + const pathname = usePathname(); + const handledReturnRef = useRef(false); + const redirectingRef = useRef(false); + const [resuming, setResuming] = useState(false); + const [redirectMessage, setRedirectMessage] = useState(null); + const { + user, + sessionHydrated, + billingSummary: realBillingSummary, + billingBusy, + billingCheckoutBusy, + billingError, + effectiveCheckoutUrl, + onboardingPending, + refreshBilling, + refreshCheckoutReturn, + resolveUserLandingRoute, + } = useDenFlow(); + + const mockMode = MOCK_BILLING && process.env.NODE_ENV !== "production"; + + const billingSummary = MOCK_BILLING + ? { + featureGateEnabled: true, + hasActivePlan: false, + checkoutRequired: true, + checkoutUrl: MOCK_CHECKOUT_URL, + portalUrl: null, + price: { amount: 5000, currency: "usd", recurringInterval: "month", recurringIntervalCount: 1 }, + subscription: null, + invoices: [], + productId: null, + benefitId: null, + } + : realBillingSummary; + + useEffect(() => { + if (!sessionHydrated || resuming || user || mockMode) { + return; + } + + setRedirectMessage("Redirecting to sign in..."); + if (!isSamePathname(pathname, "/")) { + router.replace("/"); + } + }, [mockMode, pathname, resuming, router, sessionHydrated, user]); + + useEffect(() => { + if (!sessionHydrated || !user || handledReturnRef.current || !customerSessionToken) { + return; + } + + handledReturnRef.current = true; + setResuming(true); + setRedirectMessage("Finishing your checkout..."); + + void refreshCheckoutReturn(true) + .then((target) => { + if (target && !isSamePathname(pathname, target)) { + router.replace(target); + return; + } + + setRedirectMessage(null); + setResuming(false); + }) + .catch(() => { + setRedirectMessage(null); + setResuming(false); + }); + }, [customerSessionToken, pathname, refreshCheckoutReturn, router, sessionHydrated, user]); + + 
useEffect(() => { + if (!sessionHydrated || !user || resuming) { + return; + } + + if (!billingSummary?.hasActivePlan && !effectiveCheckoutUrl && !billingBusy && !billingCheckoutBusy) { + void refreshBilling({ includeCheckout: true, quiet: true }); + } + }, [ + billingBusy, + billingCheckoutBusy, + billingSummary?.hasActivePlan, + effectiveCheckoutUrl, + refreshBilling, + resuming, + sessionHydrated, + user, + ]); + + useEffect(() => { + if ( + !sessionHydrated || + !user || + resuming || + onboardingPending || + mockMode || + redirectingRef.current || + billingBusy || + billingCheckoutBusy || + !billingSummary || + (billingSummary.featureGateEnabled && !billingSummary.hasActivePlan) + ) { + return; + } + + redirectingRef.current = true; + void resolveUserLandingRoute() + .then((target) => { + if (target && !isSamePathname(pathname, target)) { + setRedirectMessage("Redirecting to your workspace..."); + router.replace(target); + return; + } + + setRedirectMessage(null); + }) + .finally(() => { + redirectingRef.current = false; + }); + }, [ + billingBusy, + billingCheckoutBusy, + billingSummary, + mockMode, + onboardingPending, + pathname, + resolveUserLandingRoute, + resuming, + router, + sessionHydrated, + user, + ]); + + if (!sessionHydrated || (!user && !mockMode)) { + return ( + + ); + } + + if (redirectMessage) { + return ; + } + + const billingPrice = billingSummary?.price ?? null; + const showLoading = resuming || (billingBusy && !billingSummary && !MOCK_BILLING); + const checkoutHref = effectiveCheckoutUrl ?? MOCK_CHECKOUT_URL ?? null; + const planAmountLabel = + billingPrice && billingPrice.amount !== null + ? `${formatMoneyMinor(billingPrice.amount, billingPrice.currency)}/${billingPrice.recurringInterval}` + : "$50.00/month"; + const subscription = billingSummary?.subscription ?? null; + const subscriptionStatus = formatSubscriptionStatus(subscription?.status); + + return ( +
+
+
+
+

OpenWork Cloud

+

Purchase a plan before creating your workspace.

+

+ Start with one workspace plan for $50/month. Each plan includes up to 5 members and 1 hosted worker. +

+
+ +
+ {checkoutHref ? ( + + Purchase plan — $50/month + + ) : ( + + )} + + Use desktop only + +
+ +
+ $50/month per workspace + + {planAmountLabel} billed monthly + + {user?.email ?? "Signed in"} +
+
+
+ + {billingError ?
{billingError}
: null} + {showLoading ? ( +
+ Refreshing access state... +
+ ) : null} + + {billingSummary ? ( +
+
+
+
+ OpenWork Cloud +

Share your setup across your team.

+

+ Manage your team's setup, invite teammates, and keep everything in sync. +

+
+ +
+
Share setup across your team and org
+
Background agents in alpha for selected workflows
+
Custom LLM providers with team access controls
+
+ +
+
+

Background agents

+

+ Keep selected workflows running in the background. Alpha. +

+
+
+

LLM providers

+

+ Standardize provider access, model selection, and team rollout. +

+
+
+
+ +
+
+ Desktop app +

Stay local when you need to.

+

+ Run locally for free, keep your data on your machine, and add OpenWork Cloud when your team is ready. +

+
+ +
+
Run locally for free
+
Keep data on your machine
+
Move into OpenWork Cloud later
+
+ + +
+
+ + +
+ ) : null} +
+ ); +} diff --git a/ee/apps/den-web/app/(den)/_components/dashboard-redirect-screen.tsx b/ee/apps/den-web/app/(den)/_components/dashboard-redirect-screen.tsx new file mode 100644 index 0000000000..3cbb88dfd3 --- /dev/null +++ b/ee/apps/den-web/app/(den)/_components/dashboard-redirect-screen.tsx @@ -0,0 +1,39 @@ +"use client"; + +import { useEffect, useRef } from "react"; +import { usePathname, useRouter } from "next/navigation"; +import { isSamePathname } from "../_lib/client-route"; +import { useDenFlow } from "../_providers/den-flow-provider"; + +export function DashboardRedirectScreen() { + const router = useRouter(); + const pathname = usePathname(); + const redirectingRef = useRef(false); + const { resolveUserLandingRoute, sessionHydrated } = useDenFlow(); + + useEffect(() => { + if (!sessionHydrated || redirectingRef.current) { + return; + } + + redirectingRef.current = true; + void resolveUserLandingRoute() + .then((target) => { + const nextTarget = target ?? "/"; + if (!isSamePathname(pathname, nextTarget)) { + router.replace(nextTarget); + } + }) + .finally(() => { + redirectingRef.current = false; + }); + }, [pathname, resolveUserLandingRoute, router, sessionHydrated]); + + return ( +
+

OpenWork Cloud

+

Loading your workspace.

+

Routing you to the right organization and billing destination now.

+
+ ); +} diff --git a/ee/apps/den-web/app/(den)/_components/dashboard-screen.tsx b/ee/apps/den-web/app/(den)/_components/dashboard-screen.tsx new file mode 100644 index 0000000000..1ecf7e9e3d --- /dev/null +++ b/ee/apps/den-web/app/(den)/_components/dashboard-screen.tsx @@ -0,0 +1,679 @@ +"use client"; + +import Link from "next/link"; +import { useEffect } from "react"; +import { useRouter } from "next/navigation"; +import { getWorkerStatusCopy, getWorkerStatusMeta } from "../_lib/den-flow"; +import { useDenFlow } from "../_providers/den-flow-provider"; + +type IconProps = { + className?: string; +}; + +function CubeIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function MonitorIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function GlobeIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function LockIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function TerminalIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function ActivityIcon({ className = "h-5 w-5" }: IconProps) { + return ( + + ); +} + +function RefreshIcon({ className = "h-4 w-4" }: IconProps) { + return ( + + ); +} + +function CredentialRow({ + label, + value, + placeholder, + hint, + canCopy, + copied, + onCopy, + muted = false +}: { + label: string; + value: string | null; + placeholder: string; + hint?: string; + canCopy: boolean; + copied: boolean; + onCopy: () => void; + muted?: boolean; +}) { + return ( + + ); +} + +function SkeletonBar({ widthClass }: { widthClass: string }) { + return