diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000000..9704601ae9 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,11 @@ +FROM golang:1.24 + +RUN apt-get update && apt-get install -y sudo +RUN curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \ + apt-get install -y nodejs + +ADD scripts /scripts +RUN bash /scripts/install.sh +RUN bash /scripts/godeps.sh + +ENV ENCORE_GOROOT=/encore-release/encore-go diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..e5a5e6c09d --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,10 @@ +{ + "build": {"dockerfile": "Dockerfile"}, + "containerEnv": { + "ENCORE_DAEMON_DEV": "1", + "ENCORE_RUNTIMES_PATH": "${containerWorkspaceFolder}/runtimes" + }, + "extensions": ["golang.go"], + "postCreateCommand": "bash /scripts/prepare.sh", + "forwardPorts": [4000, 9400] +} diff --git a/.devcontainer/scripts/godeps.sh b/.devcontainer/scripts/godeps.sh new file mode 100644 index 0000000000..f887beb36a --- /dev/null +++ b/.devcontainer/scripts/godeps.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env +set -ex + +go install github.com/uudashr/gopkgs/v2/cmd/gopkgs@latest +go install github.com/ramya-rao-a/go-outline@latest +go install github.com/cweill/gotests/gotests@latest +go install github.com/fatih/gomodifytags@latest +go install github.com/josharian/impl@latest +go install github.com/haya14busa/goplay/cmd/goplay@latest +go install github.com/go-delve/delve/cmd/dlv@latest +go install honnef.co/go/tools/cmd/staticcheck@master +go install golang.org/x/tools/gopls@latest + +GOBIN=/tmp/ go install github.com/go-delve/delve/cmd/dlv@master +mv /tmp/dlv $GOPATH/bin/dlv-dap diff --git a/.devcontainer/scripts/install.sh b/.devcontainer/scripts/install.sh new file mode 100644 index 0000000000..b9b9fcf314 --- /dev/null +++ b/.devcontainer/scripts/install.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash +set -e + +target="$(go env 
GOOS)_$(go env GOARCH)" + +encore_uri=$(curl -sSf -N "https://encore.dev/api/releases?target=${target}&show=url") +if [ ! "$encore_uri" ]; then + echo "Error: Unable to determine latest Encore release." 1>&2 + exit 1 +fi + +encore_install="/encore-release" +bin_dir="$encore_install/bin" +exe="$bin_dir/encore" +tar="$encore_install/encore.tar.gz" + +if [ ! -d "$bin_dir" ]; then + mkdir -p "$bin_dir" +fi + +curl --fail --location --progress-bar --output "$tar" "$encore_uri" +cd "$encore_install" +tar -C "$encore_install" -xzf "$tar" +chmod +x "$bin_dir"/* +rm "$tar" + +"$exe" version + +echo "Encore was installed successfully to $exe" +if command -v encore >/dev/null; then + echo "Run 'encore --help' to get started" +else + case $SHELL in + /bin/zsh) shell_profile=".zshrc" ;; + *) shell_profile=".bash_profile" ;; + esac + echo "Manually add the directory to your \$HOME/$shell_profile (or similar)" + echo " export ENCORE_INSTALL=\"$encore_install\"" + echo " export PATH=\"\$ENCORE_INSTALL/bin:\$PATH\"" + echo "Run '$exe --help' to get started" +fi \ No newline at end of file diff --git a/.devcontainer/scripts/prepare.sh b/.devcontainer/scripts/prepare.sh new file mode 100644 index 0000000000..dce83401f9 --- /dev/null +++ b/.devcontainer/scripts/prepare.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -e +set -x + +go mod download diff --git a/.github/dockerimg/Dockerfile b/.github/dockerimg/Dockerfile new file mode 100644 index 0000000000..34ccf199e6 --- /dev/null +++ b/.github/dockerimg/Dockerfile @@ -0,0 +1,17 @@ +# syntax=docker/dockerfile:1.4 +FROM --platform=$TARGETPLATFORM ubuntu:22.04 AS build +ARG TARGETPLATFORM +ARG RELEASE_VERSION +RUN mkdir /encore +ADD rename-binary-if-needed.bash rename-binary-if-needed.bash +ADD artifacts /artifacts +RUN /bin/bash -c 'SRC=encore-$(echo $TARGETPLATFORM | tr '/' '_'); tar -C /encore -xzf /artifacts/$SRC.tar.gz' +RUN /bin/bash rename-binary-if-needed.bash + +FROM --platform=$TARGETPLATFORM ubuntu:22.04 +RUN apt-get update && 
apt-get install -y -f ca-certificates +ENV PATH="/encore/bin:${PATH}" +WORKDIR /src +ADD encore-entrypoint.bash /bin/encore-entrypoint.bash +ENTRYPOINT ["/bin/encore-entrypoint.bash"] +COPY --from=build /encore /encore diff --git a/.github/dockerimg/encore-entrypoint.bash b/.github/dockerimg/encore-entrypoint.bash new file mode 100755 index 0000000000..00ede15a7d --- /dev/null +++ b/.github/dockerimg/encore-entrypoint.bash @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -eo pipefail + +# If the ENCORE_AUTHKEY environment variable is set, log in with it. +if [ -n "$ENCORE_AUTHKEY" ]; then + echo "Logging in to Encore using provided auth key..." + encore auth login --auth-key "$ENCORE_AUTHKEY" +fi + +# Run the encore command. +encore "$@" diff --git a/.github/dockerimg/rename-binary-if-needed.bash b/.github/dockerimg/rename-binary-if-needed.bash new file mode 100755 index 0000000000..8dfca402ce --- /dev/null +++ b/.github/dockerimg/rename-binary-if-needed.bash @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -eo pipefail + +# Check if `encore-nightly`, `encore-beta` or `encore-develop` are present, and if one of them are, rename it to `encore`. +for binary in encore-nightly encore-beta encore-develop; do + if [ -f "/encore/bin/$binary" ]; then + echo "Renaming $binary to encore..." + mv /encore/bin/$binary /encore/bin/encore + fi +done + +# Sanity check that /ecore/bin/encore exists. +if [ ! -f "/encore/bin/encore" ]; then + echo "ERROR: /encore/bin/encore does not exist. Did you mount the Encore binary directory to /encore/bin?" 
+ exit 1 +fi diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..b382785bd0 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,274 @@ +name: CI + +on: + push: + branches: + - main + pull_request: + branches: + - main + schedule: + - cron: "30 2 * * *" # Every night at 2:30am UTC (if you change this schedule, also change the if statement in the test steps) + +jobs: + build: + name: "Build" + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v4 + with: + path: encr.dev + + - name: Set up Node + uses: actions/setup-node@v3 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version-file: "encr.dev/go.mod" + check-latest: true + cache-dependency-path: "encr.dev/go.sum" + + - name: Build + run: cd encr.dev && go build ./... + + - name: Build for Windows + run: cd encr.dev && go build ./... + env: + GOOS: windows + + test: + name: "Test" + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v4 + with: + path: encr.dev + + - name: Set up Node + uses: actions/setup-node@v3 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version-file: "encr.dev/go.mod" + check-latest: true + cache-dependency-path: "encr.dev/go.sum" + + - name: Set up Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - name: Install Protoc + uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6 # v2 + - name: Set up cargo cache + uses: actions/cache@v3 + continue-on-error: false + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo + + - name: Install encore-go + run: | + URL=$(curl -s https://api.github.com/repos/encoredev/go/releases/latest | grep "browser_download_url.*linux_x86-64.tar.gz" | cut -d : -f 2,3 | tr -d \" | tr -d '[:space:]') + curl --fail -L -o encore-go.tar.gz $URL && tar -C . 
-xzf ./encore-go.tar.gz + + - name: Install tsparser + run: cargo install --path encr.dev/tsparser --force --debug + + # If we're not running on a schedule, we only want to run tests on changed code + - name: Run tests on changed code on the CLI + run: cd encr.dev && go test -short -tags=dev_build 2>&1 ./... + if: github.event.schedule != '30 2 * * *' + env: + ENCORE_GOROOT: ${{ github.workspace }}/encore-go + ENCORE_RUNTIMES_PATH: ${{ github.workspace }}/encr.dev/runtimes + + - name: Run tests on changed runtime code + run: cd encr.dev/runtimes/go && go test -short -tags=dev_build ./... + if: github.event.schedule != '30 2 * * *' + + # Each night we want to run all tests multiple times to catch any flaky tests + # We will shuffle the order in which tests are run and run them 25 times looking + # for failures. We will also fail fast so that we don't waste time running tests + # that are already failing. + - name: Run all tests multiple times on the CLI + run: cd encr.dev && go test -v --count=5 -failfast -shuffle=on -timeout=30m -tags=dev_build ./... + if: github.event.schedule == '30 2 * * *' + env: + ENCORE_GOROOT: ${{ github.workspace }}/encore-go + ENCORE_RUNTIMES_PATH: ${{ github.workspace }}/encr.dev/runtimes + + - name: Run all tests multiple times on the runtime + run: cd encr.dev/runtimes/go && go test -v --count=5 -failfast -shuffle=on -timeout=30m -tags=dev_build ./... 
+ if: github.event.schedule == '30 2 * * *' + + - name: Report Nightly Failure + uses: ravsamhq/notify-slack-action@bca2d7f5660b833a27bda4f6b8bef389ebfefd25 + if: ${{ failure() && github.event.schedule == '30 2 * * *' }} + with: + status: ${{ job.status }} # required + notification_title: "{workflow} has {status_message}" + message_format: "{emoji} *{workflow}* {status_message} in <{repo_url}|{repo}>" + footer: "Linked Repo <{repo_url}|{repo}> | <{workflow_url}|View Workflow>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_ALERT_WEBHOOK_URL }} # required + + test-e2e: + name: "Test e2e" + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v4 + with: + path: encr.dev + + - name: Set up Node + uses: actions/setup-node@v3 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version-file: "encr.dev/go.mod" + check-latest: true + cache-dependency-path: "encr.dev/go.sum" + + - name: Set up Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - name: Install Protoc + uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6 # v2 + - name: Set up cargo cache + uses: actions/cache@v3 + continue-on-error: false + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo + + - name: Install encore-go + run: | + URL=$(curl -s https://api.github.com/repos/encoredev/go/releases/latest | grep "browser_download_url.*linux_x86-64.tar.gz" | cut -d : -f 2,3 | tr -d \" | tr -d '[:space:]') + curl --fail -L -o encore-go.tar.gz $URL && tar -C . 
-xzf ./encore-go.tar.gz + + - name: Install tsparser + run: cargo install --path encr.dev/tsparser --force --debug + + - name: Install tsbundler + run: cd encr.dev && go install ./cli/cmd/tsbundler-encore + + - name: Build jsruntime + run: cd encr.dev && go run ./pkg/encorebuild/cmd/build-local-binary encore-runtime.node + + # If we're not running on a schedule, we only want to run tests on changed code + - name: Run tests on changed code on the CLI + run: cd encr.dev && go test -short -tags=e2e 2>&1 ./e2e-tests + if: github.event.schedule != '30 2 * * *' + env: + ENCORE_GOROOT: ${{ github.workspace }}/encore-go + ENCORE_RUNTIMES_PATH: ${{ github.workspace }}/encr.dev/runtimes + + # Each night we want to run all tests multiple times to catch any flaky tests + # We will shuffle the order in which tests are run and run them 25 times looking + # for failures. We will also fail fast so that we don't waste time running tests + # that are already failing. + - name: Run all tests multiple times on the CLI + run: cd encr.dev && go test -v --count=5 -failfast -shuffle=on -timeout=30m -tags=e2e ./e2e-tests + if: github.event.schedule == '30 2 * * *' + env: + ENCORE_GOROOT: ${{ github.workspace }}/encore-go + ENCORE_RUNTIMES_PATH: ${{ github.workspace }}/encr.dev/runtimes + + - name: Report Nightly Failure + uses: ravsamhq/notify-slack-action@bca2d7f5660b833a27bda4f6b8bef389ebfefd25 + if: ${{ failure() && github.event.schedule == '30 2 * * *' }} + with: + status: ${{ job.status }} # required + notification_title: "{workflow} has {status_message}" + message_format: "{emoji} *{workflow}* {status_message} in <{repo_url}|{repo}>" + footer: "Linked Repo <{repo_url}|{repo}> | <{workflow_url}|View Workflow>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_ALERT_WEBHOOK_URL }} # required + + # Run static analysis on the PR + static-analysis: + name: "Static Analysis" + # We're using buildjet for this as it's very slow on Github's own runners + runs-on: buildjet-4vcpu-ubuntu-2204 + + # 
Skip any PR created by dependabot to avoid permission issues: + if: (github.actor != 'dependabot[bot]') + + permissions: + checks: write + contents: read + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Install jq + uses: dcarbone/install-jq-action@91d8da7268538e8a0ae0c8b72af44f1763228455 + + - name: Install semgrep + run: | + python3 -m pip install semgrep + python3 -m pip install --upgrade requests + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version-file: "go.mod" + cache: false + + - name: Install ci tools + run: | + go install honnef.co/go/tools/cmd/staticcheck@master + go install github.com/kisielk/errcheck@latest + go install github.com/gordonklaus/ineffassign@latest + + rust_core: + name: "Test core runtime" + runs-on: ubuntu-latest + steps: + - name: Checkout codebase + uses: actions/checkout@v4 + - name: Set up Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + components: rustfmt,clippy + - name: Install Protoc + uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6 # v2 + - name: Set up cargo cache + uses: actions/cache@v3 + continue-on-error: false + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo + - uses: taiki-e/install-action@nextest + - name: Run test + run: cargo nextest run + env: + CARGO_TERM_COLOR: always + - name: Run rustfmt + run: cargo fmt --all --check + - name: Run clippy + run: cargo clippy --all-targets --all-features -- -D warnings diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml deleted file mode 100644 index c94b3bd520..0000000000 --- a/.github/workflows/go.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Go - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - path: encr.dev - - - name: Set up Node - uses: actions/setup-node@v2.1.5 - - - 
name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.16 - - - id: go-cache-paths - run: | - echo "::set-output name=go-build::$(go env GOCACHE)" - echo "::set-output name=go-mod::$(go env GOMODCACHE)" - - - name: Install encore-go - run: curl -o encore-go.tar.gz https://d2f391esomvqpi.cloudfront.net/encore-go-0.9.6-linux_amd64.tar.gz && tar -C . -xzf ./encore-go.tar.gz - - - name: Go Build Cache - uses: actions/cache@v2 - with: - path: ${{ steps.go-cache-paths.outputs.go-build }} - key: ${{ runner.os }}-go-build-${{ hashFiles('**/go.sum') }} - - - name: Go Mod Cache - uses: actions/cache@v2 - with: - path: ${{ steps.go-cache-paths.outputs.go-mod }} - key: ${{ runner.os }}-go-mod-${{ hashFiles('**/go.sum') }} - - - name: Build dashapp - run: cd encr.dev/cli/daemon/dash/dashapp && npm install && npm run build - - - name: Build - run: cd encr.dev && go build ./... - - - name: Test - run: cd encr.dev && go test -short ./... - env: - ENCORE_GOROOT: ${{ github.workspace }}/encore-go diff --git a/.github/workflows/makefile b/.github/workflows/makefile new file mode 100644 index 0000000000..da16fb58b7 --- /dev/null +++ b/.github/workflows/makefile @@ -0,0 +1,59 @@ +# This makefile is used inconjunction with the .reviewdog.yml file in the root of the repo +.PHONY: list-modules go-vet staticcheck errcheck ineffassign go-fmt + +# Automatically gather all information +ALL_SRC := $(shell find ../../ -name "*.go") +ALL_MODS = $(shell find ../../ -name go.mod) +MOD_DIRS = $(sort $(realpath $(dir $(ALL_MODS)))) +REPO_DIR := $(realpath ../../) +SEMGREP_DIR := "$(REPO_DIR)/tools/semgrep-rules" + +# List modules reports all found Go modules within the repository +list-modules: + @echo $(MOD_DIRS) + +# Function to run a command in each Go module with appropriate build tags +# +# REL_DIR is the relative path to the file from the repository root +# it is computed by removing the REPO_DIR prefix from the $dir variable, +# then we remove the prefix "/" to make it relative 
+# and finally escaping the slashes so we can use it in sed +define run_for_each_module + @for dir in $(MOD_DIRS); do \ + TAGS=""; \ + if [ "$$dir" != "$(REPO_DIR)" ]; then \ + TAGS="-tags encore,encore_internal,encore_app"; \ + fi; \ + REL_DIR=$$(echo "$${dir#$(REPO_DIR)}/" | sed 's/^\///' | sed 's/\//\\\//g'); \ + (cd "$$dir" && $(1) $$TAGS $(2) | sed "s/^\.\//$$REL_DIR/"); \ + done; +endef + +# Run Go vet +go-vet: $(ALL_SRC) + # The sed statements are: + # + # 1. Remove any lines starting with "#" (go vet uses these for each package) + # 2. Remove any "vet: " prefix from the output (sometimes we get this sometimes we dont) + # 3. Remove any "./" prefix from the output (we'll get this for files which exist directly in the module root folder - this is done so we don't double up next) + # 4. Add a "./" prefix to the output (this is so the sed within the run_for_each_module function can add the module path to each line) + $(call run_for_each_module,go vet,./... 2>&1 | sed '/^#/d' | sed 's/^vet: //' | sed 's/^\.\///' | sed "s/^/\.\//") + +## Run staticcheck +staticcheck: $(ALL_SRC) + $(call run_for_each_module,staticcheck -tests=false -f=json,./... | jq -f "$(REPO_DIR)/.github/workflows/staticcheck-to-rdjsonl.jq" -c) + +# Run errcheck +errcheck: $(ALL_SRC) + $(call run_for_each_module,errcheck -abspath,./...) + + +## Run ineffassign +ineffassign: $(ALL_SRC) + $(call run_for_each_module,ineffassign,./... 2>&1) + +semgrep: $(ALL_SRC) + @cd $(REPO_DIR) && semgrep scan --quiet --config=auto --config=$(SEMGREP_DIR) --json | jq -f "$(REPO_DIR)/.github/workflows/semgrep-to-rdjson.jq" -c + +go-fmt: $(ALL_SRC) + @cd $(REPO_DIR) && gofmt -s -d . 
|| exit 0 diff --git a/.github/workflows/release-2.yml b/.github/workflows/release-2.yml new file mode 100644 index 0000000000..a56791f8ba --- /dev/null +++ b/.github/workflows/release-2.yml @@ -0,0 +1,119 @@ +name: Release (2.0) + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to build ("v1.2.3", "v1.2.3-nightly.20231231", "v1.2.3-beta.1" or "v0.0.0-develop+[commitHash]")' + type: string + required: true + +jobs: + release: + name: "Run Release Script" + runs-on: self-hosted + env: + GOROOT: /usr/local/go-1.21.4 + RUSTUP_HOME: /usr/local/rust/rustup + + steps: + - name: Checkout the repo + uses: actions/checkout@v4 + with: + path: encr.dev + + - name: Trigger release script + env: + NPM_PUBLISH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} + run: | + cd ${{ github.workspace }}/encr.dev + go run ./pkg/encorebuild/cmd/make-release/ -dst "${{ github.workspace }}/build" -v "${{ github.event.inputs.version }}" -publish-npm=true + + - name: Publish artifact (darwin_amd64) + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-darwin_amd64 + path: ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-darwin_amd64.tar.gz + + - name: Publish artifact (darwin_arm64) + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-darwin_arm64 + path: ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-darwin_arm64.tar.gz + + - name: Publish artifact (linux_amd64) + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-linux_amd64 + path: ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-linux_amd64.tar.gz + + - name: Publish artifact (linux_arm64) + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-linux_arm64 + path: ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-linux_arm64.tar.gz + + - 
name: Publish artifact (windows_amd64) + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-windows_amd64 + path: ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-windows_amd64.tar.gz + + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Registry + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Copy linux artifacts to docker context folder + run: | + mkdir -p ${{ github.workspace }}/encr.dev/.github/dockerimg/artifacts + cp ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-linux_amd64.tar.gz ${{ github.workspace }}/encr.dev/.github/dockerimg/artifacts/encore-linux_amd64.tar.gz + cp ${{ github.workspace }}/build/artifacts/encore-${{ github.event.inputs.version }}-linux_arm64.tar.gz ${{ github.workspace }}/encr.dev/.github/dockerimg/artifacts/encore-linux_arm64.tar.gz + + - name: Create metadata (tags, labels) for Docker image + id: docker-meta + uses: docker/metadata-action@v5 + with: + images: encoredotdev/encore + labels: | + org.opencontainers.image.title=Encore + org.opencontainers.image.vendor=encore.dev + org.opencontainers.image.authors=support@encore.dev + org.opencontainers.image.description=Encore is the end-to-end Backend Development Platform that lets you escape cloud complexity. 
+ tags: | + type=raw,value=latest,enable=${{ !contains(github.event.inputs.version, '-') }} + type=semver,pattern={{version}},value=${{ github.event.inputs.version }} + type=sha + type=schedule,pattern=nightly,enable=${{ contains(github.event.inputs.version, '-nightly.') }} + type=semver,pattern={{major}}.{{minor}},value=${{ github.event.inputs.version }},enable=${{ !contains(github.event.inputs.version, '-') }} + type=semver,pattern={{major}},value=${{ github.event.inputs.version }},enable=${{ !contains(github.event.inputs.version, '-') }} + + - name: Build and push docker images + uses: docker/build-push-action@v4 + with: + context: encr.dev/.github/dockerimg + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.docker-meta.outputs.tags }} + labels: ${{ steps.docker-meta.outputs.labels }} + cache-from: type=inline + cache-to: type=inline + build-args: | + RELEASE_VERSION=${{ github.event.inputs.version }} + + notify_release_success: + name: "Notify release system of successful build" + runs-on: self-hosted + needs: + - release + steps: + - name: Webhook + uses: distributhor/workflow-webhook@f5a294e144d6ef44cfac4d3d5e20b613bcee0d4b # v3.0.7 + env: + webhook_type: "json" + webhook_url: ${{ secrets.RELEASE_WEBHOOK }} + data: '{ "version": "${{ github.event.inputs.version }}", "run_id": "${{ github.run_id }}" }' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 08f419b8b2..765ba0ec72 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ on: description: 'Version to build ("1.2.3")' required: true encorego_version: - description: 'Encore-Go version to use ("1.2.3")' + description: 'Encore-Go version to use ("encore-go1.17.7")' required: true jobs: @@ -15,62 +15,142 @@ jobs: strategy: matrix: include: - - builder: ubuntu-latest + - builder: ubuntu-24.04 goos: linux goarch: amd64 - - builder: macos-latest + release_key: linux_x86-64 + - builder: ubuntu-24.04 + goos: linux + goarch: 
arm64 + release_key: linux_arm64 + - builder: macos-11 goos: darwin goarch: amd64 - - builder: macos-latest + release_key: macos_x86-64 + - builder: macos-11 goos: darwin goarch: arm64 + release_key: macos_arm64 - builder: windows-latest goos: windows goarch: amd64 - + release_key: windows_x86-64 + runs-on: ${{ matrix.builder }} steps: - - name: Check out repo - uses: actions/checkout@v2 - with: - path: encr.dev - - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.16 - - - id: go-cache-paths - run: | - echo "::set-output name=go-mod::$(go env GOMODCACHE)" - - - name: Install encore-go - run: curl -o encore-go.tar.gz https://d2f391esomvqpi.cloudfront.net/encore-go-${{ github.event.inputs.encorego_version }}-${{ matrix.goos }}_${{ matrix.goarch }}.tar.gz && tar -C ${{ github.workspace }} -xzf ./encore-go.tar.gz - - - name: Go Mod Cache - uses: actions/cache@v2 - with: - path: ${{ steps.go-cache-paths.outputs.go-mod }} - key: ${{ matrix.goos }}-${{ matrix.goarch }}-go-mod-${{ hashFiles('**/go.sum') }} - - - name: Build - run: cd encr.dev && go run ./pkg/make-release/make-release.go -v="${{ github.event.inputs.version }}" -dst=dist -goos=${{ matrix.goos }} -goarch=${{ matrix.goarch }} -encore-go="../encore-go" - env: - GO111MODULE: "on" - if: runner.os != 'windows' - - - name: Build - run: cd encr.dev && .\pkg\make-release\windows\build.bat - env: - GO111MODULE: "on" - ENCORE_VERSION: "${{ github.event.inputs.version }}" - ENCORE_GOROOT: "../encore-go" - if: runner.os == 'windows' - - - name: 'Tar artifacts' - run: tar -czvf encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }}.tar.gz -C encr.dev/dist/${{ matrix.goos }}_${{ matrix.goarch }} . 
- - name: Publish artifact - uses: actions/upload-artifact@v2 - with: - name: encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }} - path: encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }}.tar.gz + - name: Check out repo + uses: actions/checkout@v4 + with: + path: encr.dev + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version-file: "encr.dev/go.mod" + check-latest: true + cache-dependency-path: "encr.dev/go.sum" + + - name: Set up Zig + uses: goto-bus-stop/setup-zig@7ab2955eb728f5440978d5824358023be3a2802d # v2.2.0 + with: + version: 0.10.1 + + - name: Install encore-go + run: curl --fail -o encore-go.tar.gz -L https://github.com/encoredev/go/releases/download/${{ github.event.inputs.encorego_version }}/${{ matrix.release_key }}.tar.gz && tar -C ${{ github.workspace }} -xzf ./encore-go.tar.gz + + - name: Build + run: cd encr.dev && go run ./pkg/make-release/make-release.go -v="${{ github.event.inputs.version }}" -dst=dist -goos=${{ matrix.goos }} -goarch=${{ matrix.goarch }} -encore-go="../encore-go" + env: + GO111MODULE: "on" + if: runner.os != 'windows' + + - name: Build + run: cd encr.dev && .\pkg\make-release\windows\build.bat + env: + GO111MODULE: "on" + ENCORE_VERSION: "${{ github.event.inputs.version }}" + ENCORE_GOROOT: "../encore-go" + if: runner.os == 'windows' + + - name: "Tar artifacts" + run: tar -czvf encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }}.tar.gz -C encr.dev/dist/${{ matrix.goos }}_${{ matrix.goarch }} . 
+ - name: Publish artifact + uses: actions/upload-artifact@v3 + with: + name: encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }} + path: encore-${{ github.event.inputs.version }}-${{ matrix.goos }}_${{ matrix.goarch }}.tar.gz + + publish-docker-images: + name: "publish docker images" + runs-on: ubuntu-24.04 + needs: build + permissions: + contents: read + packages: write + + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: .github + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: .github/dockerimg/artifacts + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to Docker Registry + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v4 + with: + images: encoredotdev/encore + labels: | + org.opencontainers.image.title=Encore + org.opencontainers.image.vendor=encore.dev + org.opencontainers.image.authors=support@encore.dev + org.opencontainers.image.description=Encore is the end-to-end Backend Development Platform that lets you escape cloud complexity. 
+ tags: | + type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} + type=semver,pattern={{version}},value=v${{ github.event.inputs.version }} + type=semver,pattern={{major}}.{{minor}},value=v${{ github.event.inputs.version }} + type=semver,pattern={{major}},value=v${{ github.event.inputs.version }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: .github/dockerimg + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-args: | + RELEASE_VERSION=${{ github.event.inputs.version }} + + notify_release_success: + needs: + - build + - publish-docker-images + runs-on: ubuntu-24.04 + steps: + - name: Webhook + uses: distributhor/workflow-webhook@v3.0.7 + env: + webhook_type: "json" + webhook_url: ${{ secrets.RELEASE_WEBHOOK }} + data: '{ "version": "${{ github.event.inputs.version }}", "run_id": "${{ github.run_id }}" }' diff --git a/.github/workflows/semgrep-to-rdjson.jq b/.github/workflows/semgrep-to-rdjson.jq new file mode 100644 index 0000000000..e374fb476e --- /dev/null +++ b/.github/workflows/semgrep-to-rdjson.jq @@ -0,0 +1,43 @@ +# See https://github.com/reviewdog/reviewdog/tree/master/proto/rdf +{ + source: { + name: "semgrep", + url: "https://semgrep.dev/", + }, + diagnostics: [ + .results[] | { + code: { + value: .check_id, + url: [ + .extra.metadata.shortlink?, + .extra.metadata.source?, + .extra."semgrep.dev".rule.url?, + "https://github.com/encoredev/encore/blob/main/\(.check_id | gsub("\\."; "/")).yml" + ] | map(select(. != null)) | first, + }, + message: .extra.message, + location: { + path: .path, + range: { + start: { + line: .start.line, + column: .start.col + }, + end: { + line: .end.line, + column: .end.col + }, + }, + }, + severity: .extra.severity, + + # Temporary variable we store to track the fix + _res: . 
+ } | + if ._res.extra.fix then .suggestions = [{ + range: .location.range, + text: ._res.extra.fix, + }] else . end | + del(._res) + ] +} diff --git a/.github/workflows/staticcheck-to-rdjsonl.jq b/.github/workflows/staticcheck-to-rdjsonl.jq new file mode 100644 index 0000000000..2ca6946eae --- /dev/null +++ b/.github/workflows/staticcheck-to-rdjsonl.jq @@ -0,0 +1,19 @@ +# See https://github.com/reviewdog/reviewdog/tree/master/proto/rdf +{ + source: { + name: "staticcheck", + url: "https://staticcheck.io" + }, + message: .message, + code: {value: .code, url: "https://staticcheck.io/docs/checks#\(.code)"}, + location: { + path: .location.file, + range: { + start: { + line: .location.line, + column: .location.column + } + } + }, + severity: ((.severity|ascii_upcase|select(match("ERROR|WARNING|INFO")))//null) +} diff --git a/.gitignore b/.gitignore index 30a3d5eb4c..8ef7422ae3 100644 --- a/.gitignore +++ b/.gitignore @@ -2,10 +2,25 @@ /dist /encore /git-remote-encore +/target # Don't commit dotfiles /.encore /.vscode # Build artifact that must be placed alongside go files for Windows -*.syso \ No newline at end of file +*.syso + +# JetBrains +.idea +.fleet +.run + +# MacOS +.DS_Store + +runtimes/supervisor-encore + +runtimes/supervisor-encore-linux-amd64 + +encore-runtime.node-linux-amd64 diff --git a/.prettierrc.toml b/.prettierrc.toml new file mode 100644 index 0000000000..66c04fd783 --- /dev/null +++ b/.prettierrc.toml @@ -0,0 +1 @@ +trailingComma = "none" diff --git a/.reviewdog.yml b/.reviewdog.yml new file mode 100644 index 0000000000..74a5d9e16a --- /dev/null +++ b/.reviewdog.yml @@ -0,0 +1,33 @@ +# Encore's reviewdog configuration file. +# +# This runs in our CI pipeline when you open a PR. To run this locally +# and get the same results as our CI pipeline, run: `./check.bash` +# +# We use a makefile rather than the commands directly as this repo +# has multiple Go modules within it and most tools only look at the +# module in the current directory. 
Thus our make file runs the tool +# for each module, combining the results into a single standardised format +# that reviewdog can then parse and display as a single "run" for +# each tool. +runner: +  go-vet: +    cmd: make -s -C .github/workflows go-vet +    format: govet +  go-fmt: +    cmd: make -s -C .github/workflows go-fmt +    format: diff +# Disable staticcheck until it supports Go 1.21: https://github.com/dominikh/go-tools/issues/1431 +#  staticcheck: +#    cmd: make -s -C .github/workflows staticcheck +#    format: rdjsonl +  errcheck: +    cmd: make -s -C .github/workflows errcheck +    errorformat: +      - "%f:%l:%c:\t%m" +  ineffassign: +    cmd: make -s -C .github/workflows ineffassign +    errorformat: +      - "%f:%l:%c: %m" +  semgrep: +    cmd: make -s -C .github/workflows semgrep +    format: rdjson diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..1304d55a1c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,92 @@ +# Contributing to Encore + +We're so excited that you are interested in contributing to Encore! +All contributions are welcome, and there are several valuable ways to contribute. + +Below is a technical walkthrough of developing the `encore` command for contributing code +to the Encore project. Head over to the community section for [more ways to contribute](https://encore.dev/docs/community/contribute)! + +## GitHub Codespaces / VS Code Remote Containers +The easiest way to get started with developing Encore is using +GitHub Codespaces. Simply open this repository in a new Codespace +and your development environment will be set up with everything preconfigured for building the `encore` CLI and running applications with it. + +This also works just as well with [Visual Studio Code's Remote Development](https://code.visualstudio.com/docs/remote/remote-overview). + + +## Building the encore command from source +To build from the source simply run `go build ./cli/cmd/encore` and `go install ./cli/cmd/git-remote-encore`. 
+ +Running an Encore application requires both the Encore runtime (the `encore.dev` package) as well as a custom-built +[Go runtime](https://github.com/encoredev/go) to implement Encore's request semantics and automated instrumentation. + +As a result, the Encore Daemon must know where these two things exist on the filesystem to compile the Encore application properly. + +This must be done in one of two ways: embedding the installation path at compile time (similar to `GOROOT`) +or by setting an environment variable at runtime. + +The environment variables are: +- `ENCORE_RUNTIMES_PATH` – the path to the `encore.dev` runtime implementation. +- `ENCORE_GOROOT` – the path to encore-go on disk + +**ENCORE_RUNTIMES_PATH** + +This must be set to the location of the `encore.dev` runtime package. +It's located in this Git repository in the `runtimes` directory: + +```bash +export ENCORE_RUNTIMES_PATH=/path/to/encore/runtimes +``` + +**ENCORE_GOROOT** + +The `ENCORE_GOROOT` must be set to the path to the [Encore Go runtime](https://github.com/encoredev/go). +Unless you want to make changes to the Go runtime it's easiest to point this to an existing Encore installation. + +To do that, run `encore daemon env` and grab the value of `ENCORE_GOROOT`. For example (yours is probably different): + +```bash +export ENCORE_GOROOT=/opt/homebrew/Cellar/encore/0.16.2/libexec/encore-go +``` + +### Running applications when building from source
Once you've built your own `encore` binary and set the environment variables above, you're ready to go! + +Start the daemon with the built binary: `./encore daemon -f` + +Note that when you run commands like `encore run` you must use the same `encore` binary the daemon is running. + + +### Testing the Daemon run logic +The codegen tests in the `internal/clientgen/client_test.go` file use many auto-generated files from the +`e2e-tests/testdata` directory. 
To generate the client files and other test files, run `go test -golden-update` from +the `e2e-tests` directory. This will generate client files for all the supported client generation languages. + +Running `go test ./internal/clientgen` will now work and use the most recent client generated files. If +you change the client or content of the `testdata` folder, you may need to regenerate the client files again. + +## Architecture + +The code base is divided into several parts: + +### cli +The `encore` command line interface. The encore background daemon +is located at `cli/daemon` and is responsible for managing processes, +setting up databases and talking with the Encore servers for operations like +fetching production logs. + +### parser +The Encore Parser statically analyzes Encore apps to build up a model +of the application dubbed the Encore Syntax Tree (EST) that lives in +`parser/est`. + +For speed the parser does not perform traditional type-checking; it does +limited type-checking for enforcing Encore-specific rules but otherwise +relies on the underlying Go compiler to perform type-checking as part of +building the application. + +### compiler +The Encore Compiler rewrites the source code based on the parsed +Encore Syntax Tree to create a fully functioning application. +It rewrites API calls & API handlers, injects instrumentation +and secret values, and more. diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000000..462758d190 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,7647 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom 0.2.15", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +dependencies = [ + "anstyle", + "windows-sys 0.59.0", +] + +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +dependencies = [ + "backtrace", +] + +[[package]] +name = "approx" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "array-init" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "assert_fs" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7efdb1fdb47602827a342857666feb372712cbc64b414172bd6b167a02927674" +dependencies = [ + "anstyle", + "doc-comment", + "globwalk", + "predicates 3.1.3", + "predicates-core", + "predicates-tree", + "tempfile", +] + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "ast_node" +version = "0.9.5" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 2.0.95", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-compression" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "942c7cd7ae39e91bde4820d74132e9862e62c2f386c3aa90ccf55949f5bad63a" +dependencies = [ + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "async-trait" +version = "0.1.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "aws-config" +version = "1.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a50b30228d3af8865ce83376b4e99e1ffa34728220fe2860e4df0bb5278d6" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", 
+ "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "hex", + "http 0.2.12", + "ring 0.17.8", + "time", + "tokio", + "tracing", + "url", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60e8f6b615cb5fc60a98132268508ad104310f0cfb25a1c22eee76efdf9154da" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-runtime" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16d1aa50accc11a4b4d5c50f7fb81cc0cf60328259c587d0e6b0f11385bde46" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5ddf1dc70287dc9a2f953766a1fe15e3e74aef02fd1335f2afa475c9b4f4fc" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "fastrand", + "hex", + "hmac", + "http 0.2.12", + "http-body 0.4.6", + "lru", + "once_cell", + "percent-encoding", + "regex-lite", + "sha2", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sns" +version = "1.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b71d976a6c87d15fd3ceab6fcaa17e0d02a05a06d30f5e268e4160b25dbde26d" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sqs" +version = "1.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6493ce2b27a2687b0d8a2453bf6ad2499012e9720c3367cb1206496ede475443" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1605dc0bf9f0a4b05b451441a17fcb0bda229db384f23bf5cead3adbab0664ac" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59f3f73466ff24f6ad109095e0f3f2c830bfb4cd6c8b12f744c8e61ebf4d3ba1" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"249b2acaa8e02fd4718705a9494e3eb633637139aa4bb09d70965b0448e865db" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d3820e0c08d0737872ff3c7c1f21ebbb6693d832312d6152bf18ef50a5471c2" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "crypto-bigint 0.5.5", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.2.0", + "once_cell", + "p256", + "percent-encoding", + "ring 0.17.8", + "sha2", + "subtle", + "time", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427cb637d15d63d6f9aae26358e1c9a9c09d5aa490d64b09354c8217cfef0f28" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.60.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba1a71073fca26775c8b5189175ea8863afb1c9ea2cceb02a5de5ad9dfbaa795" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc32c", + "crc32fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef7d0a272725f87e51ba2bf89f8c21e4df61b9e49ae1ac367a6d69916ef7c90" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version 
= "0.60.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c8bc3e8fdc6b8d07d976e301c02fe553f72a39b7a9fea820e023268467d7ab6" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4e69cc50921eb913c6b662f8d909131bb3e6ad6cb6090d3a39b66fc5c52095" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a05dd41a70fc74051758ee75b5c4db2c0ca070ed9229c3df50e9475cda1cb985" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "http-body 1.0.1", + "httparse", + "hyper 0.14.32", + "hyper-rustls 0.24.2", + "once_cell", + "pin-project-lite", + "pin-utils", + "rustls 0.21.12", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92165296a47a812b267b4f41032ff8069ab7ff783696d217f0994a0d7ab585cd" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.2.0", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.2.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "38ddc9bd6c28aeb303477170ddd183760a956a03e083b3902a990238a7e3792d" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http 1.2.0", + "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab0b0166827aa700d3dc519f72f8b3a91c35d0b8d042dc5d643a91e6f80648fc" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5221b91b3e441e6675310829fd8984801b772cb1546ef6c0e54dec9f1ac13fef" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core 0.3.4", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper 0.1.2", + "tower 0.4.13", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core 0.4.5", + "base64 0.22.1", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.2", + "hyper-util", + "itoa", + "matchit", + 
"memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper 1.0.2", + "tokio", + "tokio-tungstenite 0.24.0", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "backoff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721c249ab59cbc483ad4294c9ee2671835c1e43e9ffc277e6b4ecfef733cfdc5" +dependencies = [ + "instant", + "rand 0.7.3", +] + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base32" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bb8" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89aabfae550a5c44b43ab941844ffcd2e993cb6900b342debf59e9ea74acdb8" +dependencies = [ + "async-trait", + "futures-util", + "parking_lot", + "tokio", +] + +[[package]] +name = "bb8-postgres" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56ac82c42eb30889b5c4ee4763a24b8c566518171ebea648cd7e3bc532c60680" +dependencies = [ + "async-trait", + "bb8", + "tokio", + "tokio-postgres", +] + +[[package]] +name = "better_scoped_tls" +version = "0.1.1" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "scoped-tls", +] + +[[package]] +name = "bindgen" +version = "0.66.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b84e06fc203107bfbad243f4aba2af864eb7db3b1cf46ea0a023b0b433d2a7" +dependencies = [ + "bitflags 2.9.4", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.95", + "which", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "brotli" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = 
"brotli-decompressor" +version = "2.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bstr" +version = "1.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" +dependencies = [ + "memchr", + "regex-automata 0.4.9", + "serde", +] + +[[package]] +name = "built" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b99c4cdc7b2c2364182331055623bdf45254fcb679fea565c40c3c11c101889a" +dependencies = [ + "cargo-lock", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytemuck" +version = "1.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cargo-lock" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e11c675378efb449ed3ce8de78d75d0d80542fc98487c26aba28eb3b82feac72" +dependencies = [ + 
"semver", + "serde", + "toml", + "url", +] + +[[package]] +name = "cc" +version = "1.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.52.6", +] + +[[package]] +name = "cidr" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd1b64030216239a2e7c364b13cd96a2097ebf0dfe5025f2dedee14a23f2ab60" + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_derive", + "clap_lex", + "indexmap 1.9.3", + "once_cell", + "strsim 0.10.0", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "clean-path" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaa6b4b263a5d737e9bf6b7c09b72c41a5480aec4d7219af827f6564e950b6a5" + +[[package]] +name = "cmake" +version = "0.1.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" +dependencies = [ + "cc", +] + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console" +version = "0.15.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "windows-sys 0.59.0", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32c" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a47af21622d091a8f0fb295b88bc886ac74efcc613efc19f5d0b21de5c89e47" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cron-parser" +version = "0.8.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa8b40ccc59e09cc54bec675bea0e79dae34f5b900c8c5e06d8d6db884107adb" +dependencies = [ + "chrono", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "crypto-common" +version = 
"0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.95", +] + +[[package]] +name = "daemonize" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab8bfdaacb3c887a54d41bdf48d3af8873b3f5566469f8ba21b92057509f116e" +dependencies = [ + "libc", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.11.1", + "syn 2.0.95", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "duct" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ab5718d1224b63252cd0c6f74f6480f9ffeb117438a2e0f5cf6d9a4798929c" +dependencies = [ + "libc", + "once_cell", + "os_pipe", + "shared_child", +] + +[[package]] +name = "duration-string" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fcc1d9ae294a15ed05aeae8e11ee5f2b3fe971c077d45a42fb20825fba6ee13" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der 0.6.1", + "elliptic-curve", + "rfc6979", + "signature 1.6.4", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint 0.4.9", + "der 0.6.1", + "digest", + "ff", + "generic-array", + "group", + "pkcs8 0.9.0", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" +dependencies = [ + "serde", +] + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "encore-js-runtime" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum 0.7.9", + "bytes", + "chrono", + "encore-runtime-core", + "futures", + "log", + "malachite", + "mappable-rc", + "napi", + "napi-build", + "napi-derive", + "num_cpus", + "prost 0.12.6", + "prost-types 0.12.6", + "serde_json", + "tokio", + "tokio-util", +] + +[[package]] +name = "encore-runtime-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_matches", + "async-stream", + "aws-config", + "aws-credential-types", + "aws-sdk-s3", + "aws-sdk-sns", + "aws-sdk-sqs", + "aws-smithy-types", + "axum 0.7.9", + "backtrace", + "base32", + "base64 0.21.7", + "bb8", + "bb8-postgres", + "byteorder", + "bytes", + "chrono", + "cidr", + "colored", + "cookie", + "duct", + "email_address", + "env_logger 0.10.2", + "flate2", + "form_urlencoded", + "futures", + "futures-core", + "futures-util", + "gjson", + "google-cloud-gax", + "google-cloud-googleapis", + "google-cloud-pubsub", + "google-cloud-storage", + "hex", + "hmac", + "http 1.2.0", + "http-body-util", + "httpdate", + "hyper 1.5.2", + "indexmap 2.7.0", + "insta", + "jsonwebtoken", + "log", + "malachite", + "matchit", + "md5", + "mime", + "native-tls", + "once_cell", + "openssl", + "openssl-probe", + "percent-encoding", + "pgvector", + "pingora", + "postgres-native-tls", + "postgres-protocol", + "proptest", + "prost 0.12.6", + "prost-build 0.12.6", + "prost-types 0.12.6", + "quickcheck", + "radix_fmt", + "rand 0.8.5", + "regex", + "reqwest 0.12.12", + "rsa", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "serde_with", + "sha2", + "sha3", + "subtle", + "thiserror 1.0.69", + "tokio", + "tokio-nsq", + "tokio-postgres", + "tokio-retry", + "tokio-stream", + "tokio-tungstenite 0.21.0", + "tokio-util", + "tower-http", + "tower-service", + "tracing", + "tracing-subscriber", + "url", + 
"urlencoding", + "uuid", + "xid", +] + +[[package]] +name = "encore-supervisor" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum 0.7.9", + "base64 0.21.7", + "bytes", + "env_logger 0.11.6", + "flate2", + "futures", + "http 1.2.0", + "hyper 1.5.2", + "libc", + "log", + "openssl", + "pingora", + "prost 0.12.6", + "prost-build 0.12.6", + "prost-types 0.12.6", + "reqwest 0.12.12", + "serde", + "serde_json", + "tokio", + "tokio-retry", + "tokio-util", + "url", +] + +[[package]] +name = "encore-tsparser" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_fs", + "assert_matches", + "chrono", + "clean-path", + "convert_case", + "cron-parser", + "duct", + "env_logger 0.10.2", + "handlebars", + "indexmap 2.7.0", + "insta", + "itertools 0.13.0", + "junction", + "litparser", + "litparser-derive", + "log", + "matchit", + "once_cell", + "pg_query", + "prost 0.12.6", + "prost-build 0.12.6", + "regex", + "semver", + "serde", + "serde_json", + "serde_yaml 0.9.34+deprecated", + "swc_common", + "swc_ecma_ast", + "swc_ecma_loader", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_visit", + "symlink", + "tempdir", + "thiserror 1.0.69", + "tracing", + "tracing-subscriber", + "txtar", + "url", + "walkdir", +] + +[[package]] +name = "env_filter" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + 
"termcolor", +] + +[[package]] +name = "env_logger" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "humantime", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "erased-serde" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d" +dependencies = [ + "serde", + "typeid", +] + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "libz-ng-sys", + "miniz_oxide", +] + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "from_variant" +version = "0.1.6" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "pmutil", + "proc-macro2", + "swc_macros_common", + "syn 2.0.95", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "geo-types" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6f47c611187777bbca61ea7aba780213f5f3441fd36294ab333e96cfa791b65" +dependencies = [ + "approx", + "num-traits", + "serde", +] + +[[package]] +name = "gethostname" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + 
"cfg-if", + "js-sys", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "gjson" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43503cc176394dd30a6525f5f36e838339b8b5619be33ed9a7783841580a97b6" + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "globset" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "globwalk" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" +dependencies = [ + "bitflags 2.9.4", + "ignore", + "walkdir", +] + +[[package]] +name = "google-cloud-auth" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf7cb7864f08a92e77c26bb230d021ea57691788fb5dd51793f96965d19e7f9" +dependencies = [ + "async-trait", + "base64 0.21.7", + "google-cloud-metadata 0.4.0", + "google-cloud-token", + "home", + "jsonwebtoken", + "reqwest 0.11.27", + "serde", + "serde_json", + "thiserror 1.0.69", + "time", + "tokio", + "tracing", + "urlencoding", +] 
+ +[[package]] +name = "google-cloud-auth" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57a13fbacc5e9c41ded3ad8d0373175a6b7a6ad430d99e89d314ac121b7ab06" +dependencies = [ + "async-trait", + "base64 0.21.7", + "google-cloud-metadata 0.5.0", + "google-cloud-token", + "home", + "jsonwebtoken", + "reqwest 0.12.12", + "serde", + "serde_json", + "thiserror 1.0.69", + "time", + "tokio", + "tracing", + "urlencoding", +] + +[[package]] +name = "google-cloud-gax" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cb60314136e37de9e2a05ddb427b9c5a39c3d188de2e2f026c6af74425eef44" +dependencies = [ + "google-cloud-token", + "http 0.2.12", + "thiserror 1.0.69", + "tokio", + "tokio-retry", + "tonic", + "tower 0.4.13", + "tracing", +] + +[[package]] +name = "google-cloud-googleapis" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8a478015d079296167e3f08e096dc99cffc2cb50fa203dd38aaa9dd37f8354" +dependencies = [ + "prost 0.12.6", + "prost-types 0.12.6", + "tonic", +] + +[[package]] +name = "google-cloud-metadata" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc279bfb50487d7bcd900e8688406475fc750fe474a835b2ab9ade9eb1fc90e2" +dependencies = [ + "reqwest 0.11.27", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "google-cloud-metadata" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04f945a208886a13d07636f38fb978da371d0abc3e34bad338124b9f8c135a8f" +dependencies = [ + "reqwest 0.12.12", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "google-cloud-pubsub" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f6e4fdcd8303ad0d0cdb8b5722aa3a57de9534af27d4da71fc4d3179174a896" +dependencies = [ + "async-channel", + "async-stream", + "google-cloud-auth 0.13.2", + 
"google-cloud-gax", + "google-cloud-googleapis", + "google-cloud-token", + "prost-types 0.12.6", + "thiserror 1.0.69", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "google-cloud-storage" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7347a3d65cd64db51e5b4aebf0c68c484042948c6d53f856f58269bc9816360" +dependencies = [ + "anyhow", + "async-stream", + "async-trait", + "base64 0.21.7", + "bytes", + "futures-util", + "google-cloud-auth 0.17.2", + "google-cloud-metadata 0.5.0", + "google-cloud-token", + "hex", + "once_cell", + "percent-encoding", + "pkcs8 0.10.2", + "regex", + "reqwest 0.12.12", + "reqwest-middleware", + "ring 0.17.8", + "serde", + "serde_json", + "sha2", + "thiserror 1.0.69", + "time", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "google-cloud-token" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c12ba8b21d128a2ce8585955246977fbce4415f680ebf9199b6f9d6d725f" +dependencies = [ + "async-trait", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.7.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.2.0", + 
"indexmap 2.7.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "handlebars" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "hstr" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63d6824358c0fd9a68bb23999ed2ef76c84f79408a26ef7ae53d5f370c94ad36" +dependencies = [ + "hashbrown 0.14.5", + "new_debug_unreachable", + "once_cell", + "phf 0.11.3", + "rustc-hash", + "triomphe", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.2.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" 
+version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "log", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +dependencies = [ + "futures-util", + "http 1.2.0", + "hyper 1.5.2", + "hyper-util", + "rustls 0.23.20", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.1", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper 0.14.32", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.32", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 
1.5.2", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.5.2", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + 
+[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + 
"quote", + "syn 2.0.95", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "ignore" +version = "0.4.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" +dependencies = [ + "crossbeam-deque", + "globset", + "log", + "memchr", + "regex-automata 0.4.9", + "same-file", + "walkdir", + "winapi-util", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", + "serde", +] + +[[package]] +name = "insta" +version = "1.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6513e4067e16e69ed1db5ab56048ed65db32d10ba5fc1217f5393f8f17d8b5a5" +dependencies = [ + "console", + "globset", + "linked-hash-map", + "once_cell", + "serde", + "similar", + "walkdir", +] + +[[package]] +name = "instant" +version = "0.1.13" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "ipnet" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" + +[[package]] +name = "is-macro" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d57a3e447e24c22647738e4607f1df1e0ec6f72e16182c4cd199f647cdfb0e4" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "is-terminal" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +dependencies = [ + "hermit-abi 0.4.0", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" +dependencies = [ + "base64 0.21.7", + "js-sys", + "pem", + "ring 0.17.8", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "junction" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72bbdfd737a243da3dfc1f99ee8d6e166480f17ab4ac84d7c34aacd73fc7bd16" +dependencies = [ + "scopeguard", + "windows-sys 0.52.0", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin 0.9.8", +] + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libloading" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +dependencies = [ + "cfg-if", + "windows-targets 0.52.6", +] + +[[package]] +name = "libm" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" + +[[package]] +name = "libz-ng-sys" +version = "1.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cee1488e961a80d172564fd6fcda11d8a4ac6672c06fe008e9213fa60520c2b" +dependencies = [ + "cmake", + "libc", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + +[[package]] +name = "litparser" +version = "0.1.0" +dependencies = [ + "anyhow", + "clean-path", + "duration-string", + "num-bigint", + "num-integer", + "num-traits", + "swc_common", + "swc_ecma_ast", +] + +[[package]] +name = "litparser-derive" +version = "0.1.0" +dependencies = [ + "anyhow", + "litparser", + "prettyplease", + "proc-macro2", + "quote", + "swc_common", + "swc_ecma_ast", + "swc_ecma_parser", + "syn 2.0.95", 
+] + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +dependencies = [ + "serde", + "value-bag", +] + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.2", +] + +[[package]] +name = "malachite" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec410515e231332b14cd986a475d1c3323bcfa4c7efc038bfa1d5b410b1c57e4" +dependencies = [ + "malachite-base", + "malachite-nz", + "malachite-q", +] + +[[package]] +name = "malachite-base" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c738d3789301e957a8f7519318fcbb1b92bb95863b28f6938ae5a05be6259f34" +dependencies = [ + "hashbrown 0.15.2", + "itertools 0.14.0", + "libm", + "ryu", +] + +[[package]] +name = "malachite-nz" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1707c9a1fa36ce21749b35972bfad17bbf34cf5a7c96897c0491da321e387d3b" +dependencies = [ + "itertools 0.14.0", + "libm", + "malachite-base", + "wide", +] + +[[package]] +name = "malachite-q" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d764801aa4e96bbb69b389dcd03b50075345131cd63ca2e380bca71cc37a3675" +dependencies = [ + "itertools 0.14.0", + "malachite-base", + "malachite-nz", +] + +[[package]] +name = "mappable-rc" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"204651f31b0a6a7b2128d2b92c372cd94607b210c3a6b6e542c57a8cfd4db996" + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" 
+version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + +[[package]] +name = "napi" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b" +dependencies = [ + "anyhow", + "bitflags 2.9.4", + "ctor", + "napi-derive", + "napi-sys", + "once_cell", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "napi-build" +version = "2.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db836caddef23662b94e16bf1f26c40eceb09d6aee5d5b06a7ac199320b69b19" + +[[package]] +name = "napi-derive" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c" +dependencies = [ + "cfg-if", + "convert_case", + "napi-derive-backend", + "proc-macro2", + "quote", + "syn 2.0.95", +] 
+ +[[package]] +name = "napi-derive-backend" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf" +dependencies = [ + "convert_case", + "once_cell", + "proc-macro2", + "quote", + "regex", + "semver", + "syn 2.0.95", +] + +[[package]] +name = "napi-sys" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3" +dependencies = [ + "libloading", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nix" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + +[[package]] +name = "normpath" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "2a9da8c9922c35a1033d76f7272dfc2e7ee20392083d75aeea6ced23c6266578" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "openssl" +version = "0.10.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +dependencies = [ + "bitflags 2.9.4", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "300.4.1+3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa4eac4138c62414b5622d1b31c5c304f34b406b013c079c2bbc652fdd6678c" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "os_pipe" +version = "1.2.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +dependencies = [ + "ecdsa", + "elliptic-curve", + "sha2", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "path-clean" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ecba01bf2678719532c5e3059e0b5f0811273d94b397088b82e3bd0a78c78fdd" + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +dependencies = [ + "base64 0.22.1", + "serde", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" +dependencies = [ + "memchr", + "thiserror 2.0.10", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = 
"pest_meta" +version = "2.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.7.0", +] + +[[package]] +name = "pg_query" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ca6fdb8f9d32182abf17328789f87f305dd8c8ce5bf48c5aa2b5cffc94e1c04" +dependencies = [ + "bindgen", + "cc", + "fs_extra", + "glob", + "itertools 0.10.5", + "prost 0.13.5", + "prost-build 0.13.5", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "pgvector" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc58e2d255979a31caa7cabfa7aac654af0354220719ab7a68520ae7a91e8c0b" +dependencies = [ + "bytes", + "postgres-types", + "serde", +] + +[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_macros 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand 0.8.5", +] + +[[package]] +name = "phf_generator" +version = "0.11.3" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +dependencies = [ + "phf_shared 0.11.3", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "phf_macros" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher 1.0.1", +] + +[[package]] +name = "pin-project" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pingora" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79c9fc7098dc3e7d09d2d1647921005be9301cf68536826195dc5369e05124bd" +dependencies = [ + "pingora-cache", + "pingora-core", + "pingora-http", + "pingora-load-balancing", + "pingora-proxy", + "pingora-timeout", +] + +[[package]] +name = "pingora-cache" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ee62f28526d8d484621e77f8d6a1807f1bd07558a06ab5a204b4834d6be056" +dependencies = [ + "ahash", + "async-trait", + "blake2", + "bytes", + "hex", + "http 1.2.0", + "httparse", + "httpdate", + "indexmap 1.9.3", + "log", + "lru", + "once_cell", + "parking_lot", + "pingora-core", + "pingora-error", + "pingora-header-serde", + "pingora-http", + "pingora-lru", + "pingora-timeout", + "regex", + "rmp", + "rmp-serde", + "rustracing", + "rustracing_jaeger", + "serde", + "strum", + "tokio", +] + +[[package]] +name = "pingora-core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d123320b69bd06e897fc16bd1dde962a7b488c4d2ae825683fbca0198fad8669" +dependencies = [ + "ahash", + "async-trait", + "brotli", + "bytes", + "chrono", + "clap", + "daemonize", + "flate2", + "futures", + "h2 0.4.7", + "http 1.2.0", + "httparse", + "httpdate", + "libc", + "log", + "lru", + "nix", + "once_cell", + "openssl-probe", + "parking_lot", + "percent-encoding", + "pingora-error", + "pingora-http", + "pingora-openssl", + "pingora-pool", + "pingora-runtime", + "pingora-timeout", + "prometheus", + "rand 0.8.5", + "regex", + "serde", + "serde_yaml 0.8.26", + "sfv", + "socket2", + "strum", + "strum_macros", + 
"thread_local", + "tokio", + "tokio-test", + "unicase", + "windows-sys 0.59.0", + "zstd", +] + +[[package]] +name = "pingora-error" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6389511530152c535a554f592ae4a9691b1246cff20eb4564f2a34fc921195c0" + +[[package]] +name = "pingora-header-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcb3f62d852da015e76ced56e93e6d52941679a9825281c90f2897841129e59d" +dependencies = [ + "bytes", + "http 1.2.0", + "httparse", + "pingora-error", + "pingora-http", + "thread_local", + "zstd", + "zstd-safe", +] + +[[package]] +name = "pingora-http" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70202f126056f366549afc804741e12dd9f419cfc79a0063ab15653007a0f4c6" +dependencies = [ + "bytes", + "http 1.2.0", + "pingora-error", +] + +[[package]] +name = "pingora-ketama" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c1bb6c2e11823a05ec9140fc8827f112b8380d78b837535f284e0a98f24cc0a" +dependencies = [ + "crc32fast", +] + +[[package]] +name = "pingora-load-balancing" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84d558167ecb05cea487a6479700390a67fe414724f203e10c3912584a0f2cb1" +dependencies = [ + "arc-swap", + "async-trait", + "derivative", + "fnv", + "futures", + "http 1.2.0", + "log", + "pingora-core", + "pingora-error", + "pingora-http", + "pingora-ketama", + "pingora-runtime", + "rand 0.9.1", + "tokio", +] + +[[package]] +name = "pingora-lru" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb50f65f06c4b81ccb3edcceaa54bb9439608506b0b3b8c048798169a64aad8e" +dependencies = [ + "arrayvec", + "hashbrown 0.15.2", + "parking_lot", + "rand 0.9.1", +] + +[[package]] +name = "pingora-openssl" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f18158b901a02289f2a2a954a531c96e4d0703c94f7c9291981c9e53fddc6c1" +dependencies = [ + "foreign-types", + "libc", + "openssl", + "openssl-src", + "openssl-sys", + "tokio-openssl", +] + +[[package]] +name = "pingora-pool" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bacdd5dbdec690d468856d988b170c8bb4ab62e0edefc0f432ba5e326489f421" +dependencies = [ + "crossbeam-queue", + "log", + "lru", + "parking_lot", + "pingora-timeout", + "thread_local", + "tokio", +] + +[[package]] +name = "pingora-proxy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5031783d6743bd31e4de7d7c7a19e9eecf369174c3cbd8a57eb52bc6bf882d92" +dependencies = [ + "async-trait", + "bytes", + "clap", + "futures", + "h2 0.4.7", + "http 1.2.0", + "log", + "once_cell", + "pingora-cache", + "pingora-core", + "pingora-error", + "pingora-http", + "pingora-timeout", + "regex", + "tokio", +] + +[[package]] +name = "pingora-runtime" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31a7c445ca224630961045684201e3cf8da9af0b01f286ed54ff8b2403aaabff" +dependencies = [ + "once_cell", + "rand 0.8.5", + "thread_local", + "tokio", +] + +[[package]] +name = "pingora-timeout" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "685bb8808cc1919c63a06ab14fdac9b84a4887ced49259a5c0adc8bfb2ffe558" +dependencies = [ + "once_cell", + "parking_lot", + "pin-project-lite", + "thread_local", + "tokio", +] + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der 0.7.9", + "pkcs8 0.10.2", + "spki 0.7.3", +] + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der 0.6.1", + "spki 0.6.0", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der 0.7.9", + "spki 0.7.3", +] + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "pmutil" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "postgres-native-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d442770e2b1e244bb5eb03b31c79b65bb2568f413b899eaba850fa945a65954" +dependencies = [ + "futures", + "native-tls", + "tokio", + "tokio-native-tls", + "tokio-postgres", +] + +[[package]] +name = "postgres-protocol" +version = "0.6.8" +source = "git+https://github.com/encoredev/rust-postgres?branch=encore-patches-sync#e71577eea1d14769a3021b6dd214448e3de3ffb6" +dependencies = [ + "base64 0.22.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac", + "log", + "md-5", + "memchr", + "rand 0.9.1", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.9" +source = "git+https://github.com/encoredev/rust-postgres?branch=encore-patches-sync#e71577eea1d14769a3021b6dd214448e3de3ffb6" +dependencies = [ + "array-init", + "bytes", + "chrono", + "cidr", + "fallible-iterator", + "geo-types", + "postgres-protocol", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "predicates" +version = "2.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" +dependencies = [ + "difflib", + "float-cmp", + "itertools 0.10.5", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates" +version = "3.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" +dependencies = [ + "anstyle", + "difflib", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" + +[[package]] +name = "predicates-tree" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "prettyplease" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "483f8c21f64f3ea09fe0f30f5d48c3e8eefe5dac9129f0075f76593b4c1da705" +dependencies = [ + "proc-macro2", + "syn 2.0.95", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror 1.0.69", +] + +[[package]] +name = "proptest" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fcdab19deb5195a31cf7726a210015ff1496ba1464fd42cb4f537b8b01b471f" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.9.4", + "lazy_static", + "num-traits", + "rand 0.9.1", + "rand_chacha 0.9.0", + "rand_xorshift", + "regex-syntax 0.8.5", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "prost" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +dependencies = [ + "bytes", + "prost-derive 0.12.6", +] + 
+[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive 0.13.5", +] + +[[package]] +name = "prost-build" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" +dependencies = [ + "bytes", + "heck 0.5.0", + "itertools 0.12.1", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost 0.12.6", + "prost-types 0.12.6", + "regex", + "syn 2.0.95", + "tempfile", +] + +[[package]] +name = "prost-build" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" +dependencies = [ + "heck 0.5.0", + "itertools 0.14.0", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost 0.13.5", + "prost-types 0.13.5", + "regex", + "syn 2.0.95", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +dependencies = [ + "anyhow", + "itertools 0.12.1", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "prost-types" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" +dependencies = [ + "prost 0.12.6", +] + +[[package]] +name = "prost-types" +version = "0.13.5" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" +dependencies = [ + "prost 0.13.5", +] + +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quickcheck" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" +dependencies = [ + "env_logger 0.8.4", + "log", + "rand 0.8.5", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + +[[package]] +name = "radix_fmt" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce082a9940a7ace2ad4a8b7d0b1eac6aa378895f18be598230c5f2284ac05426" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + 
"libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.15", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core 0.9.3", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +dependencies = [ + "bitflags 2.9.4", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper-tls 0.5.0", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + 
"winreg 0.50.0", +] + +[[package]] +name = "reqwest" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.2", + "hyper-rustls 0.27.5", + "hyper-tls 0.6.0", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 2.2.0", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "system-configuration 0.6.1", + "tokio", + "tokio-native-tls", + "tokio-util", + "tower 0.5.2", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "reqwest-middleware" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" +dependencies = [ + "anyhow", + "async-trait", + "http 1.2.0", + "reqwest 0.12.12", + "serde", + "thiserror 1.0.69", + "tower-service", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rmp" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "rsa" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.2.0", + "spki 0.7.3", + "subtle", + "zeroize", +] + +[[package]] +name = "rust_decimal" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" +dependencies = [ + "arrayvec", + "num-traits", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = 
[ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" +dependencies = [ + "bitflags 2.9.4", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +dependencies = [ + "base64 0.13.1", + "log", + "ring 0.16.20", + "sct 0.6.1", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki 0.101.7", + "sct 0.7.1", +] + +[[package]] +name = "rustls" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls" +version = "0.23.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustracing" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44822b10c095e574869de2b891e40c724fef42cadaea040d1cd3bdbb13d36a5" +dependencies = [ + "backtrace", + "crossbeam-channel", + "rand 0.8.5", + "trackable 0.2.24", +] + +[[package]] +name = "rustracing_jaeger" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6c2fe9411ef5f43ac773f0e84ad735804c55719346a7aad52de2d9162db97c8" +dependencies = [ + "crossbeam-channel", + "hostname", 
+ "percent-encoding", + "rand 0.8.5", + "rustracing", + "thrift_codec", + "trackable 0.2.24", +] + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "safe_arch" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +dependencies = [ + "ring 0.16.20", + "untrusted 0.7.1", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der 0.6.1", + "generic-array", + "pkcs8 0.9.0", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.4", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" +dependencies = [ + "serde", +] + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "serde_fmt" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d4ddca14104cd60529e8c7f7ba71a2c8acd8f7f5cfcdc2faf97eeb7c3010a4" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_json" +version = "1.0.135" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" +dependencies = [ + "indexmap 2.7.0", + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.7.0", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap 1.9.3", + "ryu", + "serde", + "yaml-rust", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.7.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sfv" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27daf6ed3fc7ffd5ea3ce9f684fe351c47e50f2fdbb6236e2bad0b440dbe408" +dependencies = [ + "data-encoding", + "indexmap 2.7.0", + "rust_decimal", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + 
+[[package]] +name = "shared_child" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fa9338aed9a1df411814a5b2252f7cd206c55ae9bf2fa763f8de84603aa60c" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "similar" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +dependencies = [ + "bstr", + "unicode-segmentation", +] + +[[package]] +name = "similar-asserts" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe85670573cd6f0fa97940f26e7e6601213c3b0555246c24234131f88c5709e" +dependencies = [ + "console", + "similar", +] + +[[package]] +name = "simple_asn1" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 1.0.69", + "time", +] + 
+[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "smartstring" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" +dependencies = [ + "autocfg", + "static_assertions", + "version_check", +] + +[[package]] +name = "snap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" + +[[package]] +name = "socket2" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der 0.6.1", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der 0.7.9", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared 0.10.0", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_enum" +version = "0.4.1" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 2.0.95", +] + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strsim" +version = "0.11.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.95", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "sval" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6dc0f9830c49db20e73273ffae9b5240f63c42e515af1da1fceefb69fceafd8" + +[[package]] +name = "sval_buffer" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "429922f7ad43c0ef8fd7309e14d750e38899e32eb7e8da656ea169dd28ee212f" +dependencies = [ + "sval", + "sval_ref", +] + +[[package]] +name = "sval_dynamic" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f16ff5d839396c11a30019b659b0976348f3803db0626f736764c473b50ff4" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_fmt" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c01c27a80b6151b0557f9ccbe89c11db571dc5f68113690c1e028d7e974bae94" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_json" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0deef63c70da622b2a8069d8600cf4b05396459e665862e7bdb290fd6cf3f155" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_nested" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a39ce5976ae1feb814c35d290cf7cf8cd4f045782fe1548d6bc32e21f6156e9f" +dependencies = [ + "sval", + "sval_buffer", + "sval_ref", +] + +[[package]] +name = "sval_ref" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb7c6ee3751795a728bc9316a092023529ffea1783499afbc5c66f5fabebb1fa" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_serde" +version = "2.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a5572d0321b68109a343634e3a5d576bf131b82180c6c442dee06349dfc652a" +dependencies = [ + "serde", + "sval", + "sval_nested", +] + +[[package]] +name = "swc_atoms" +version = "0.6.4" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "hstr", + "once_cell", + "rustc-hash", + "serde", +] + +[[package]] +name = "swc_cached" +version = "0.3.18" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "ahash", + "anyhow", + "dashmap", + "once_cell", + "regex", + "serde", +] + +[[package]] +name = "swc_common" +version = "0.33.8" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "ast_node", + "atty", + "better_scoped_tls", + "cfg-if", + "either", + "from_variant", + "new_debug_unreachable", + "num-bigint", + "once_cell", + "rustc-hash", + "serde", + "siphasher 0.3.11", + "string_cache", + "swc_atoms", + "swc_eq_ignore_macros", + "swc_visit", + "termcolor", + "tracing", + "unicode-width", + "url", +] + +[[package]] +name = "swc_ecma_ast" +version = "0.110.9" +source = 
"git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "bitflags 2.9.4", + "is-macro", + "num-bigint", + "phf 0.11.3", + "scoped-tls", + "string_enum", + "swc_atoms", + "swc_common", + "unicode-id", +] + +[[package]] +name = "swc_ecma_loader" +version = "0.45.9" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "anyhow", + "dashmap", + "indexmap 1.9.3", + "normpath", + "once_cell", + "path-clean", + "pathdiff", + "serde", + "serde_json", + "swc_cached", + "swc_common", + "tracing", +] + +[[package]] +name = "swc_ecma_parser" +version = "0.141.21" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "either", + "new_debug_unreachable", + "num-bigint", + "num-traits", + "phf 0.11.3", + "serde", + "smallvec", + "smartstring", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "tracing", + "typed-arena", +] + +[[package]] +name = "swc_ecma_transforms_base" +version = "0.134.30" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "better_scoped_tls", + "bitflags 2.9.4", + "indexmap 1.9.3", + "once_cell", + "phf 0.10.1", + "rustc-hash", + "serde", + "smallvec", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_parser", + "swc_ecma_utils", + "swc_ecma_visit", + "tracing", +] + +[[package]] +name = "swc_ecma_utils" +version = "0.124.26" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "indexmap 1.9.3", + "num_cpus", + "once_cell", + "rustc-hash", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_visit", + "tracing", + "unicode-id", +] + +[[package]] +name = "swc_ecma_visit" +version = "0.96.9" +source = 
"git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "num-bigint", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_visit", + "tracing", +] + +[[package]] +name = "swc_eq_ignore_macros" +version = "0.1.2" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "swc_macros_common" +version = "0.3.8" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "swc_visit" +version = "0.5.7" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "either", + "swc_visit_macros", +] + +[[package]] +name = "swc_visit_macros" +version = "0.5.8" +source = "git+https://github.com/encoredev/swc?branch=node-resolve-exports#3ccddcb7d70380b6952296717b2d9f2056f4c2ac" +dependencies = [ + "Inflector", + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 2.0.95", +] + +[[package]] +name = "symlink" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] 
+name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "sysctl" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225e483f02d0ad107168dc57381a8a40c3aeea6abe47f37506931f861643cfa8" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "libc", + "thiserror 1.0.69", + "walkdir", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.9.4", + "core-foundation", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + +[[package]] +name = "tempfile" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom 0.2.15", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ac7f54ca534db81081ef1c1e7f6ea8a3ef428d2fc069097c079443d24124d3" +dependencies = [ + "thiserror-impl 2.0.10", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9465d30713b56a37ede7185763c3492a91be2f5fa68d958c44e41ab9248beb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "thrift_codec" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fb61fb3d0a0af14949f3a6949b2639112e13226647112824f4d081533f9b1a8" +dependencies = [ + "byteorder", + "trackable 0.2.24", +] + +[[package]] +name = "time" +version = "0.3.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + 
+[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-nsq" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "289e54c5548b30d6fd1edb525812fa26c745ba0dccdf5fc552ffe7f8b0f7991e" +dependencies = [ + "anyhow", + "async-compression", + "backoff", + "built", + "byteorder", + "futures", + "futures-util", + "gethostname", + "hyper 0.14.32", + "lazy_static", + "log", + 
"matches", + "regex", + "rustls 0.19.1", + "serde", + "serde_json", + "snap", + "thiserror 1.0.69", + "tokio", + "tokio-io-timeout", + "tokio-rustls 0.22.0", +] + +[[package]] +name = "tokio-openssl" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59df6849caa43bb7567f9a36f863c447d95a11d5903c9cc334ba32576a27eadd" +dependencies = [ + "openssl", + "openssl-sys", + "tokio", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.13" +source = "git+https://github.com/encoredev/rust-postgres?branch=encore-patches-sync#e71577eea1d14769a3021b6dd214448e3de3ffb6" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "constant_time_eq", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot", + "percent-encoding", + "phf 0.11.3", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand 0.9.1", + "socket2", + "tokio", + "tokio-util", + "whoami", +] + +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" +dependencies = [ + "rustls 0.19.1", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.4", + 
"rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls 0.23.20", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-test" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" +dependencies = [ + "async-stream", + "bytes", + "futures-core", + "tokio", + "tokio-stream", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" +dependencies = [ + "futures-util", + "log", + "rustls 0.22.4", + "rustls-native-certs 0.7.3", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tungstenite 0.21.0", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.24.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "futures-util", + "hashbrown 0.14.5", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.7.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tonic" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +dependencies = [ + "async-stream", + "async-trait", + "axum 0.6.20", + "base64 0.21.7", + "bytes", + "flate2", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.12.6", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", + "webpki-roots", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + 
"futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" +dependencies = [ + "bitflags 2.9.4", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "http-range-header", + "httpdate", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "trackable" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98abb9e7300b9ac902cc04920945a874c1973e08c310627cc4458c04b70dd32" +dependencies = [ + "trackable 1.3.0", + "trackable_derive", +] + +[[package]] +name = "trackable" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15bd114abb99ef8cee977e517c8f37aee63f184f2d08e3e6ceca092373369ae" +dependencies = [ + "trackable_derive", +] + +[[package]] +name = "trackable_derive" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebeb235c5847e2f82cfe0f07eb971d1e5f6804b18dac2ae16349cc604380f82f" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "triomphe" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" +dependencies = [ + "serde", + "stable_deref_trait", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + 
"http 1.2.0", + "httparse", + "log", + "rand 0.8.5", + "rustls 0.22.4", + "rustls-pki-types", + "sha1", + "thiserror 1.0.69", + "url", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 1.2.0", + "httparse", + "log", + "rand 0.8.5", + "sha1", + "thiserror 1.0.69", + "utf-8", +] + +[[package]] +name = "txtar" +version = "1.0.0" +dependencies = [ + "assert_fs", + "clean-path", + "predicates 2.1.5", + "similar-asserts", + "thiserror 1.0.69", +] + +[[package]] +name = "typed-arena" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" + +[[package]] +name = "typeid" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e13db2e0ccd5e14a544e8a246ba2312cd25223f616442d7f2cb0e3db614236e" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-id" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10103c57044730945224467c09f71a4db0071c123a0648cc3e818913bde6b561" + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = 
"2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" +dependencies = [ + "value-bag-serde1", + "value-bag-sval2", +] + +[[package]] +name = "value-bag-serde1" +version = "1.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bb773bd36fd59c7ca6e336c94454d9c66386416734817927ac93d81cb3c5b0b" +dependencies = [ + "erased-serde", + "serde", + "serde_fmt", +] + +[[package]] +name = "value-bag-sval2" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a916a702cac43a88694c97657d449775667bcd14b70419441d05b7fea4a83a" +dependencies = [ + "sval", + "sval_buffer", + "sval_dynamic", + "sval_fmt", + "sval_json", + "sval_ref", + "sval_serde", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.95", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.99" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" +dependencies = [ + "ring 0.16.20", + "untrusted 0.7.1", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "whoami" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" +dependencies = [ + "redox_syscall", + "wasite", + 
"web-sys", +] + +[[package]] +name = "wide" +version = "0.7.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce5da8ecb62bcd8ec8b7ea19f69a51275e91299be594ea5cc6ef7819e16cd03" +dependencies = [ + "bytemuck", + "safe_arch", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + 
"windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + 
+[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d107f8c6e916235c4c01cabb3e8acf7bea8ef6a63ca2e7fa0527c049badfc48c" +dependencies = [ + "winapi", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.9.4", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "xid" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3752a194518cdee5d019812fb7978c51d8f0b7cfe9ace5983df1780964bb84c0" +dependencies = [ + "crc32fast", + "hostname", + "md5", + "once_cell", + "rand 0.8.5", + "sysctl", + "thiserror 1.0.69", + "winreg 0.8.0", +] + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.95", +] + +[[package]] +name = "zstd" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = 
"7.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.13+zstd.1.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000..fed5c568c6 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,21 @@ +[workspace] +resolver = "2" +members = ["runtimes/core", "runtimes/js", "tsparser", "supervisor"] + +[profile.dev.package] +insta.opt-level = 3 + +[profile.release] +lto = true + +[patch.crates-io] +tokio-postgres = { git = "https://github.com/encoredev/rust-postgres", branch = "encore-patches-sync" } +postgres-protocol = { git = "https://github.com/encoredev/rust-postgres", branch = "encore-patches-sync" } +postgres-types = { git = "https://github.com/encoredev/rust-postgres", branch = "encore-patches-sync" } +swc_ecma_parser = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_ecma_ast = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_ecma_transforms_base = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_atoms = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_common = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_ecma_loader = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } +swc_ecma_visit = { git = "https://github.com/encoredev/swc", branch = "node-resolve-exports" } diff --git a/Cross.toml b/Cross.toml new file mode 100644 index 0000000000..4a264df650 --- /dev/null +++ b/Cross.toml @@ -0,0 +1,12 @@ +[build] +pre-build = [ + "apt-get install unzip &&", + "curl -LO 
https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-x86_64.zip &&", + "unzip protoc-24.4-linux-x86_64.zip -d /usr/local &&", + "rm protoc-24.4-linux-x86_64.zip &&", + "export PATH=$PATH:/usr/local/bin", +] + +[build.env] +volumes = ["ENCORE_WORKDIR"] +passthrough = ["TYPE_DEF_TMP_PATH", "ENCORE_VERSION"] \ No newline at end of file diff --git a/DEVELOPING.md b/DEVELOPING.md deleted file mode 100644 index b7e6cf1947..0000000000 --- a/DEVELOPING.md +++ /dev/null @@ -1,44 +0,0 @@ -# Developing Encore - -Building an Encore application requires access to both the Encore runtime (the `encore.dev` package) as well as a custom-built -([Go runtime](https://github.com/encoredev/go)) to implement Encore's request semantics and automated instrumentation. - -As a result the Encore Daemon must know where these two things exist on the filesystem in order to properly compile the Encore application. - -This must be done in one of two ways: embedding the installation path at compile time (similar to `GOROOT`) -or by setting an environment variable at runtime. - -The environment variables are: -- `ENCORE_GOROOT` – the path to encore-go on disk -- `ENCORE_RUNTIME_PATH` – the path to the `encore.dev` runtime implementation. - -`ENCORE_RUNTIME_PATH` can be set to location of the `compiler/runtime` package in this repository, -while `ENCORE_GOROOT` must be pointed to where `encore-go` was built. - -For more information on this see [cli/internal/env/env.go](cli/internal/env/env.go). - -## Architecture - -The code base is divided into several parts: - -### cli -The `encore` command line interface. The encore background daemon -is located at `cli/daemon` and is responsible for managing processes, -setting up databases and talking with the Encore servers for operations like -fetching production logs. 
- -### parser -The Encore Parser statically analyzes Encore apps to build up a model -of the application dubbed the Encore Syntax Tree (EST) that lives in -`parser/est`. - -For speed the parser does not perform traditional type-checking; it does -limited type-checking for enforcing Encore-specific rules but otherwise -relies on the underlying Go compiler to perform type-checking as part of -building the application. - -### compiler -The Encore Compiler rewrites the source code based on the parsed -Encore Syntax Tree to create a fully functioning application. -It rewrites API calls & API handlers, injects instrumentation -and secret values, and more. \ No newline at end of file diff --git a/README.md b/README.md index 09ca89b986..98b159f113 100644 --- a/README.md +++ b/README.md @@ -1,216 +1,335 @@ -# Encore - The Go backend framework with superpowers +

+encore icon


+Open Source Framework for creating type-safe distributed systems with declarative infrastructure
+

- +- **Backend Frameworks:** [Encore.ts](https://encore.dev) and [Encore.go](https://encore.dev/go) simplify creating microservices and type-safe APIs, and provide an AI-ready declarative approach to define infrastructure in code. +- **Local Development:** Encore's CLI automatically manages local infrastructure and provides a development dashboard with tracing, service catalog, and architecture diagrams. +- **Infrastructure Integration:** Simplified integration with cloud infrastructure using the open source CLI ([learn more](https://encore.dev/docs/ts/self-host/build)), or using the optional [Encore Cloud](https://encore.cloud) platform to automate DevOps and infrastructure provisioning in your cloud on AWS and GCP. -https://encore.dev +**⭐ Star this repository** to help spread the word. -Encore is a Go backend framework for rapidly creating APIs and distributed systems. +**💿 Install Encore:** +- **macOS:** `brew install encoredev/tap/encore` +- **Linux:** `curl -L https://encore.dev/install.sh | bash` +- **Windows:** `iwr https://encore.dev/install.ps1 | iex` -It uses static analysis and code generation to reduce the boilerplate you have to write, -resulting in an extremely productive developer experience. +**🕹 Create your first app:** +- **TypeScript:** `encore app create --example=ts/hello-world` +- **Go:** `encore app create --example=hello-world` -The key features of Encore are: +**🤖 Add Encore LLM instructions to your app:** +- **Encore.ts:** [ts_llm_instructions.txt](./ts_llm_instructions.txt) +- **Encore.go:** [go_llm_instructions.txt](./go_llm_instructions.txt) +- **How to use:** + - Cursor: Rename the file to `.cursorrules`. + - GitHub Copilot: Paste content in `.github/copilot-instructions.md`. + - For other tools, place the file in your app root. + +**⚡️ Build your first Encore.ts app with AI using Leap.new** +- [**Leap**](https://leap.new) is an AI developer agent for building full-stack Encore.ts applications. 
It's the fastest way to build your first app. -* **No boilerplate**: Encore drastically reduces the boilerplate needed to set up - a production ready backend application. Define backend services, API endpoints, - and call APIs with a single line of Go code. +**🧩 See example apps:** [Example Apps Repo](https://github.com/encoredev/examples/) -* **Distributed Tracing**: Encore uses a combination of static analysis and code - generation to automatically instrument your application for excellent observability. - Automatically captures information about API calls, goroutines, HTTP requests, - database queries, and more. Automatically works for local development as well - as in production. +**🚀 See products being build with Encore:** [Showcase](https://encore.cloud/showcase) -* **Infrastructure Provisioning**: Encore understands how your application works, - and uses that understanding to provision and manage your cloud infrastructure. - Automatically works with all the major cloud providers, as well as for local development. +**👋 Have questions?** Join the friendly developer community on [Discord](https://encore.dev/discord). -* **Simple Secrets**: Encore makes it easy to store and securely use secrets and API keys. - Never worry about how to store and get access to secret values again. +**📞 Talk to a human:** [Book a 1:1 demo](https://encore.dev/book) with one of our founders. -* **API Documentation**: Encore parses your source code to understand the request/response - schemas for all your APIs. Encore can automatically generate high-quality, interactive - API Documentation for you. It can also automatically generate type-safe, documented - clients for your frontends. +## 🍿 Intro video +[Watch the intro video](https://youtu.be/vvqTGfoXVsw) for a quick introduction to Encore concepts & code examples. 
-**Read the complete documentation at [encore.dev/docs](https://encore.dev/docs).** +Encore Intro Video -## Quick Start +## Introduction to Encore -### Install -```bash -# macOS -brew install encoredev/tap/encore -# Linux -curl -L https://encore.dev/install.sh | bash -# Windows -iwr https://encore.dev/install.ps1 | iex -``` +Building scalable applications with cloud services is powerful but often frustrating. Developers face complex setups and repetitive tasks that slow them down. -### Create your app -```bash -encore app create my-app -cd my-app -encore run -``` +Encore solves this with an all-in-one backend development toolkit, streamlining everything from local testing to cloud integration and DevOps. -### Deploy -```bash -git push encore -``` +

+Encore Overview +

-#### Setup Demo -[![Setup demo](https://asciinema.org/a/406681.svg)](https://asciinema.org/a/406681) +### How it works -## Superpowers +Encore's open source backend frameworks [Encore.ts](https://encore.dev/docs/ts) and [Encore.go](https://encore.dev/docs/primitives/overview) enable you to define resources like services, databases, cron jobs, and Pub/Sub, as type-safe objects in your application code. -Encore comes with tons of superpowers that radically simplify backend development compared to traditional frameworks: +With the frameworks you only define **infrastructure semantics** — _the things that matter to your application's behavior_ — not configuration for _specific_ cloud services. Encore parses your application and builds a graph of both its logical architecture and its infrastructure requirements, it then automatically generates boilerplate and helps orchestrate the relevant infrastructure for each environment. This means the same application code can be used to run locally, test in preview environments, and deploy to cloud environments on e.g. AWS and GCP. -- A state of the art developer experience with unmatched productivity -- Define services, APIs, and make API calls with a single line of Go code -- Autocomplete and get compile-time checks for API calls -- Generates beautiful API docs and API clients automatically -- Instruments your app with Distributed Tracing, logs, and metrics – automatically -- Runs serverlessly on Encore's cloud, or deploys to your own favorite cloud -- Sets up dedicated Preview Environments for your pull requests -- Supports flexible authentication -- Manages your databases and migrates them automatically -- Provides an extremely simple yet secure secrets management -- And lots more... +This often removes the need for separate infrastructure configuration like Terraform, increases standardization in both your codebase and infrastructure, and makes your application highly portable across cloud providers. 
-## Using Encore +Encore is fully open source, there is **no proprietary code running in your application**. -Encore makes it super easy to create backend services and APIs. +### Example: Hello World -### Creating a service with an API +Defining microservices and API endpoints is incredibly simple—with less than 10 lines of code, you can create a production-ready, deployable service. -In Encore, a backend service is just a regular Go package with one or more APIs defined. -The Go package name becomes the service name (which must be unique within your app). +**Hello World in Encore.ts** -```go -package greet +```typescript +import { api } from "encore.dev/api"; -import ( - "context" - "fmt" -) +export const get = api( + { expose: true, method: "GET", path: "/hello/:name" }, + async ({ name }: { name: string }): Promise => { + const msg = `Hello ${name}!`; + return { message: msg }; + } +); -type Params struct { - Name string +interface Response { + message: string; } +``` -type Response struct { - Message string +**Hello World in Encore.go** + +```go +package hello + +//encore:api public path=/hello/:name +func World(ctx context.Context, name string) (*Response, error) { + msg := fmt.Sprintf("Hello, %s!", name) + return &Response{Message: msg}, nil } -//encore:api public -func Person(ctx context.Context, params *Params) (*Response, error) { - msg := fmt.Sprintf("Hello, %s!", params.Name) - return &Response{Message: msg}, nil +type Response struct { + Message string } ``` -This creates a backend service named `greet`, with a single API endpoint named `Person`. +### Example: Using Pub/Sub -Calling it is easy: -```bash -$ encore run # run the app in a separate terminal -$ curl http://localhost:4060/greet.Person -d '{"Name": "Jane"}' -# Outputs: {"Message": "Hello, Jane!"} -``` +If you want a Pub/Sub Topic, you declare it directly in your application code and Encore will integrate the infrastructure and generate the boilerplate code necessary. 
+Encore supports the following Pub/Sub infrastructure: +- **NSQ** for local environments (automatically provisioned by Encore's CLI) +- **GCP Pub/Sub** for environments on GCP +- **SNS/SQS** for environments on AWS -[Learn more in the Encore docs](https://encore.dev/docs/concepts/services-and-apis). +**Using Pub/Sub in Encore.ts** -### Calling an API endpoint -Calling an API endpoint from another endpoint is easy. +```typescript +import { Topic } "encore.dev/pubsub" -Just import the service (with a regular Go import), and then call the function -as if it were a regular Go function: +export interface SignupEvent { + userID: string; +} + +export const signups = new Topic("signups", { + deliveryGuarantee: "at-least-once", +}); +``` + +**Using Pub/Sub in Encore.go** ```go -import "my.app/greet" - -func MyAPI(ctx context.Context) error { - resp, err := greet.Person(ctx, &greet.Params{Name: "John"}) - if err != nil { - fmt.Println("The greeting message is:", resp.Message) - } - return err -} +import "encore.dev/pubsub" + +type User struct { /* fields... */ } + +var Signup = pubsub.NewTopic[*User]("signup", pubsub.TopicConfig{ + DeliveryGuarantee: pubsub.AtLeastOnce, +}) + +// Publish messages by calling a method +Signup.Publish(ctx, &User{...}) ``` -Encore uses its static analysis and code generation to turn this into a proper API call. +### Learn more in the docs -[Learn more in the Encore docs](https://encore.dev/docs/concepts/services-and-apis). 
+See how to use the backend frameworks in the docs: -### SQL Databases +- **Services:** [Go](https://encore.dev/docs/go/primitives/services) / [TypeScript](https://encore.dev/docs/ts/primitives/services) +- **APIs:** [Go](https://encore.dev/docs/go/primitives/defining-apis) / [TypeScript](https://encore.dev/docs/ts/primitives/defining-apis) +- **Databases:** [Go](https://encore.dev/docs/go/primitives/databases) / [TypeScript](https://encore.dev/docs/ts/primitives/databases) +- **Cron Jobs:** [Go](https://encore.dev/docs/go/primitives/cron-jobs) / [TypeScript](https://encore.dev/docs/ts/primitives/cron-jobs) +- **Pub/Sub:** [Go](https://encore.dev/docs/go/primitives/pubsub) / [TypeScript](https://encore.dev/docs/ts/primitives/pubsub) +- **Object Storage:** [Go](https://encore.dev/docs/go/primitives/object-storage) / [TypeScript](https://encore.dev/docs/ts/primitives/object-storage) +- **Caching:** [Go](https://encore.dev/docs/go/primitives/caching) / TypeScript (Coming soon) -Encore automatically provisions, connects to, and performs schema migrations of SQL databases for you. -All you have to do is define the SQL migrations: +## Using Encore: An end-to-end workflow from local to cloud -```sql --- greet/migrations/1_create_table.up.sql -CREATE TABLE person ( - name TEXT PRIMARY KEY, - count INT NOT NULL -); -``` +Encore provides purpose-built tooling for each step in the development process, from local development and testing, to cloud DevOps. Here we'll cover the key features for each part of the process. -Then import `encore.dev/storage/sqldb` and just start querying: +### Local Development -```go -// genGreeting generates a personalized greeting for the given name. -func genGreeting(ctx context.Context, name string) (string, error) { - var count int - // Insert the row, and increment count if the row is already in the db. 
- err := sqldb.QueryRow(ctx, ` - INSERT INTO "person" (name, count) - VALUES ($1, 1) - ON CONFLICT (name) DO UPDATE - SET count = person.count + 1 - RETURNING count - `, name).Scan(&count) - if err != nil { - return "", err - } - - switch count { - case 1: - return fmt.Sprintf("Nice to meet you, %s!", name), nil - case 2: - return fmt.Sprintf("Hi again, %s!", name), nil - default: - return fmt.Sprintf("Good to see you, %s! We've met %d times before.", name, count-1), nil - } -} -``` +

+Local Development +

+ +When you run your app locally using the [Encore CLI](https://encore.dev/docs/install), Encore parses your code and automatically sets up the necessary local infrastructure on the fly. _No more messing around with Docker Compose!_ + +You also get built-in tools for an efficient workflow when creating distributed systems and event-driven applications: + +- **Local environment matches cloud:** Encore automatically handles the semantics of service communication and interfacing with different types of infrastructure services, so that the local environment is a 1:1 representation of your cloud environment. +- **Cross-service type-safety:** When building microservices applications with Encore, you get type-safety and auto-complete in your IDE when making cross-service API calls. +- **Type-aware infrastructure:** With Encore, infrastructure like Pub/Sub queues are type-aware objects in your program. This enables full end-to-end type-safety when building event-driven applications. +- **Secrets management:** Built-in [secrets management](https://encore.dev/docs/ts/primitives/secrets) for all environments. +- **Tracing:** The [local development dashboard](https://encore.dev/docs/ts/observability/dev-dash) provides local tracing to help understand application behavior and find bugs. +- **Automatic API docs & clients:** Encore generates [API docs](https://encore.dev/docs/ts/obsevability/service-catalog) and [API clients](https://encore.dev/docs/ts/cli/client-generation) in Go, TypeScript, JavaScript, and OpenAPI specification. + +_Here's a video showing the local development dashboard:_ + +https://github.com/encoredev/encore/assets/78424526/4d066c76-9e6c-4c0e-b4c7-6b2ba6161dc8 + +### Testing + +

+testing +

+ +Encore comes with several built-in tools to help with testing: + +- **Built-in service/API mocking:** Encore provides built-in support for [mocking API calls](https://encore.dev/docs/go/develop/testing/mocking), and interfaces for automatically generating mock objects for your services. +- **Local test infra:** When running tests locally, Encore automatically provides dedicated [test infrastructure](https://encore.dev/docs/go/develop/testing#test-only-infrastructure) to isolate individual tests. +- **Local test tracing:** The [Local Development Dashboard](https://encore.dev/docs/ts/observability/dev-dash) provides distributed tracing for tests, providing great visibility into what's happening and making it easier to understand why a test failed. +- **Preview Environments:** When using Encore Cloud (optional), it automatically provisions a temporary cloud-based [Preview Environment](https://encore.dev/docs/platform/deploy/preview-environments) for each Pull Request, an effective tool when doing end-to-end testing. + +### Optional: Automate your AWS/GCP with Encore Cloud + +

+DevOps +

+ +Encore Cloud is Encore's managed service offering for teams wanting to focus their engineering effort on their product development, avoiding investing time in platformization and DevOps. + +Encore Cloud provides **automatic infrastructure provisioning in your cloud on AWS & GCP**. So instead of writing Terraform, YAML, or clicking in cloud consoles, you [connect your cloud account](https://encore.dev/docs/platform/deploy/own-cloud) and simply deploy your application. Since using Encore's open source backend frameworks means your application code is cloud agnostic and not tied to any specific infrastructure services, Encore Cloud enables you to change your infrastructure depending on your evolving needs, without needing to make code changes or manually update infrastructure config files. + +When you deploy, Encore Cloud automatically provisions [infrastructure](https://encore.dev/docs/platform/infrastructure/infra) using battle-tested cloud services on AWS and GCP, such as: +- **Compute:** GCP Cloud Run, AWS Fargate, Kubernetes (GKE and EKS) +- **Databases:** GCP Cloud SQL, AWS RDS +- **Pub/Sub:** GCP Pub/Sub, AWS SQS/SNS +- **Caches:** GCP Memorystore, Amazon ElastiCache +- **Object Storage:** GCS, Amazon S3 +- **Secrets:** GCP Secret Manager, AWS Secrets Manager +- Etc. + +Encore Cloud also includes cloud versions of Encore's built-in development tools: + +- [Service Catalog & API Docs](https://encore.dev/docs/ts/observability/service-catalog) +- [Architecture Diagrams](https://encore.dev/docs/ts/observability/flow) +- [Tracing](https://encore.dev/docs/ts/observability/tracing) + +_Here's a video showing the Encore Cloud dashboard:_ + +https://github.com/encoredev/encore/assets/78424526/8116b387-d4d4-4e54-8768-3686ba0245f5 + +## Why use Encore? + +- **Faster Development**: Encore streamlines the development process with its backend frameworks, clear abstractions, and built-in local development tools. 
+- **Scalability & Performance**: Encore simplifies building large-scale microservices applications that can handle growing user bases and demands, without the normal boilerplate and complexity. +- **Control & Standardization**: Built-in tools like automated architecture diagrams, infrastructure tracking and approval workflows, make it easy for teams and leaders to get an overview of the entire application. +- **Security & Compliance**: Encore Cloud helps ensure your application is secure and compliant by enforcing security standards like least privilege IAM, and provisioning infrastructure according to best practices for each cloud provider. +- **Reduced Costs**: Encore Cloud's automatic infrastructure management minimizes wasteful cloud expenses and reduces DevOps workload, allowing you to work more efficiently. + +## Common use cases + +Encore is designed to give teams a productive and less complex experience when solving most backend use cases. Many teams use Encore to build things like: + +- High-performance B2B Platforms +- Fintech & Consumer apps +- Global E-commerce marketplaces +- Microservices backends for SaaS applications and mobile apps +- And much more... + +## Getting started + +- **1. Install Encore:** + - **macOS:** `brew install encoredev/tap/encore` + - **Linux:** `curl -L https://encore.dev/install.sh | bash` + - **Windows:** `iwr https://encore.dev/install.ps1 | iex` +- **2. Create your first app:** + - **TypeScript:** `encore app create --example=ts/introduction` + - **Go:** `encore app create --example=hello-world` +- **3. Star the project** on [GitHub](https://github.com/encoredev/encore) to stay up-to-date +- **4. Explore the [Documentation](https://encore.dev/docs)** to learn more about Encore's features +- **5. 
[Join Discord](https://encore.dev/discord)** to ask questions and meet other Encore developers + +## Open Source + +Everything needed to develop and deploy Encore applications is Open Source, including the backend frameworks, parser, compiler, runtime, and CLI. +This includes all code needed for local development and everything that runs in your application when it is deployed. + +The Open Source CLI also provides a mechanism to generate a Docker images for your application, so you easily self-host your application. [Learn more in the docs](https://encore.dev/docs/ts/self-host/build). + +## Join the most pioneering developer community + +Developers building with Encore are forward-thinkers who want to focus on creative programming and building great software to solve meaningful problems. It's a friendly place, great for exchanging ideas and learning new things! **Join the conversation on [Discord](https://encore.dev/discord).** + +We rely on your contributions and feedback to improve Encore for everyone who is using it. +Here's how you can contribute: + +- ⭐ **Star and watch this repository to help spread the word and stay up to date.** +- Meet fellow Encore developers and chat on [Discord](https://encore.dev/discord). +- Follow Encore on [Twitter](https://twitter.com/encoredotdev). +- Share feedback or ask questions via [email](mailto:hello@encore.dev). +- Leave feedback on the [Public Roadmap](https://encore.dev/roadmap). +- Send a pull request here on GitHub with your contribution. 
+ +## Videos + +- Intro: Encore concepts & features +- Demo video: Getting started with Encore.ts +- Demo: Building and deploying a simple Go service +- Demo: Building an event-driven system in Go + +## Visuals + +### Code example (Go) + +https://github.com/encoredev/encore/assets/78424526/f511b3fe-751f-4bb8-a1da-6c9e0765ac08 + +### Local Development Dashboard + +https://github.com/encoredev/encore/assets/78424526/4c659fb8-e9ec-4f14-820b-c2b8d35e5359 + +### Generated Architecture Diagrams & Service Catalog + +https://github.com/encoredev/encore/assets/78424526/a880ed2d-e9a6-4add-b5a8-a4b44b97587b + +### Auto-Provisioning Infrastructure & Multi-cloud Deployments + +https://github.com/encoredev/encore/assets/78424526/8116b387-d4d4-4e54-8768-3686ba0245f5 + +### Distributed Tracing & Metrics + +https://github.com/encoredev/encore/assets/78424526/35189335-e3d7-4046-bab0-1af0f00d2504 + +## Frequently Asked Questions (FAQ) + +### Who's behind Encore? + +Encore was founded by long-time backend engineers from Spotify, Google, and Monzo with over 50 years of collective experience. We’ve lived through the challenges of building complex distributed systems with thousands of services, and scaling to hundreds of millions of users. + +Encore grew out of these experiences and is a solution to the frustrations that came with them: unnecessary crippling complexity and constant repetition of undifferentiated work that suffocates the developer’s creativity. With Encore, we want to set developers free to achieve their creative potential. + +### Who is Encore for? -#### Database Demo -[![Setting up a database](https://asciinema.org/a/406695.svg)](https://asciinema.org/a/406695) +**For individual developers** building for the cloud, Encore provides a radically improved experience. With Encore you’re able to stay in the flow state and experience the joy and creativity of building. -[Learn more in the Encore docs](https://encore.dev/docs/concepts/databases). 
+**For startup teams** who need to build a scalable backend to support the growth of their product, Encore lets them get up and running in the cloud within minutes. It lets them focus on solving the needs of their users, instead of spending most of their time re-solving the everyday challenges of building distributed systems in the cloud. -### API Documentation +**For individual teams in large organizations** that want to focus on innovating and building new features, Encore lets them stop spending time on operations and onboarding new team members. Using Encore for new feature development is easy, just spin up a new backend service in a few minutes. -Encore automatically generates API documentation for your app. +### How is Encore different? -You can access it by viewing the local development dashboard by opening the API URL -in your browser when your app is running (normally [localhost:4060](http://localhost:4060)). +Encore is the only tool that understands what you’re building. Encore uses static analysis to deeply understand the application you’re building. This enables a unique developer experience that helps you stay in the flow as you’re building. For instance, you don't need to bother with configuring and managing infrastructure, setting up environments and keeping them in sync, or writing documentation and drafting architecture diagrams. Encore does all of this automatically out of the box. -[![API Documentation](https://encore.dev/assets/img/api-docs-screenshot.png)](https://encore.dev/docs/concepts/api-docs) +### Why does Encore provide infrastructure integrations through Encore Cloud? -### Distributed Tracing +We've found that to meaningfully improve the developer experience, you have to operate across the full stack. Unless you understand how an application is deployed, there are a large number of things in the development process that you can't simplify. That's why so many other developer tools have such a limited impact. 
With Encore's more integrated approach, we're able to unlock a radically better experience for developers. -Encore automatically instruments your app with Distributed Tracing. +### What if I want to migrate away from Encore? -For local development you can access it by viewing the local development dashboard by opening the API URL -in your browser when your app is running (normally [localhost:4060](http://localhost:4060)). +Encore is designed to let you go outside of the framework when you want to, and easily drop down in abstraction level when you need to, so you never run into any dead-ends. -Any API calls to your app automatically produces traces. +Should you want to migrate away, it's straightforward and does not require a big rewrite. 99% of your code is regular Go or TypeScript. -![Automatic Tracing](https://encore.dev/assets/img/tracing.jpg) +Encore provides tools for [self-hosting](https://encore.dev/docs/ts/self-host/build) your application, by using the Open Source CLI to produce a standalone Docker image that can be deployed anywhere you'd like. -## Developing Encore and building from source +## Contributing to Encore and building from source -See [DEVELOPING.md](DEVELOPING.md). \ No newline at end of file +See [CONTRIBUTING.md](CONTRIBUTING.md). diff --git a/check.bash b/check.bash new file mode 100755 index 0000000000..d5d5e66557 --- /dev/null +++ b/check.bash @@ -0,0 +1,189 @@ +#!/usr/bin/env bash +# +# This script will run the same checks as Encore's CI pipeline and report the same static analysis errors +# as the pipeline by default. It can be used to check for what errors might be reported by the pipeline +# before you commit and open a PR. 
+# +# Usage: +# ./check.bash [options] +# +# Options: +# --base The merge base to compare against (default: origin/main) +# --diff Show the diff against base instead of running the checks +# --filter-mode The filter mode to use for reviewdog; added, file, diff_context, nofilter (default: file) +# --all Alias for `--filter-mode nofilter` (runs checks against all files in the working directory) +# +# Examples: +# +# # Run the checks against files changed since branching from origin/main +# # (This is the default behavior and what our CI process does) +# ./check.bash +# +# # Show the diff between the current working directory and origin/main +# ./check.bash --diff +# +# # Run the checks against the entire working directory (regardless of changes made) +# ./check.bash --all + + +############################################################################################################################## +# Step 0: Setup the script with basic error handling # +############################################################################################################################## + + set -euo pipefail + # nosemgrep + IFS=$'\n\t' + + function errHandler() { + echo "Exiting due to an error line $1" >&2 + echo "" >&2 + awk 'NR>L-4 && NR> ":""),$0 }' L="$1" "$0" >&2 + } + trap 'errHandler $LINENO' ERR + + +############################################################################################################################## +# Step 1: Configure the script with the parameters the use wants # +############################################################################################################################## + + # Parameters + WORK_DIR=$( dirname "${BASH_SOURCE[0]}" ) # Get the directory this script is in + BASE_REF="origin/main" # The merge base to compare against + DIFF_ONLY="false" # If true, show the diff instead of running the checks + FILTER_MODE="file" # The filter mode to use for reviewdog (added, file, diff_context, nofilter) + + # Parse the 
command line arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --base) + BASE_REF="$2" + shift 2 + ;; + --diff) + DIFF_ONLY="true" + shift 1 + ;; + --filter-mode) + FILTER_MODE="$2" + shift 2 + ;; + --all) + FILTER_MODE="nofilter" + shift 1 + ;; + *) + echo "Unknown argument: $1" + exit 1 + ;; + esac + done + + +############################################################################################################################## +# Step 2: Check for required tools and error out if anything is missing which we can't install for the user # +############################################################################################################################## + + # Check for tools we can't install using go + command -v go >/dev/null 2>&1 || { echo >&2 "go is required but not installed. Aborting."; exit 1; } + command -v git >/dev/null 2>&1 || { echo >&2 "git is required but not installed. Aborting."; exit 1; } + command -v sed >/dev/null 2>&1 || { echo >&2 "sed is required but not installed. Aborting."; exit 1; } + command -v semgrep >/dev/null 2>&1 || { echo >&2 "semgrep is required but not installed. Aborting."; exit 1; } + + # Now install all missing tools + command -v reviewdog >/dev/null 2>&1 || go install github.com/reviewdog/reviewdog/cmd/reviewdog@latest || { echo >&2 "Unable to install reviewdog. Aborting."; exit 1; } + command -v staticcheck >/dev/null 2>&1 || go install honnef.co/go/tools/cmd/staticcheck@latest || { echo >&2 "Unable to install staticcheck. Aborting."; exit 1; } + command -v errcheck >/dev/null 2>&1 || go install github.com/kisielk/errcheck@latest || { echo >&2 "Unable to install errcheck. Aborting."; exit 1; } + command -v ineffassign >/dev/null 2>&1 || go install github.com/gordonklaus/ineffassign@latest || { echo >&2 "Unable to install ineffassign. 
Aborting."; exit 1; } + + +############################################################################################################################## +# Step 3: Create a diff of the changes in the working directory against the common ancestor of the current branch and main # +# This will be used to run static analysis checks on only the files that have changed. This diff should mimic the # +# diff that would be created by GitHub when all current changes are committed and pushed into a PR on GitHub. # +############################################################################################################################## + + # Don't generate the diff if we don't need it to filter! + if [[ "$FILTER_MODE" != "nofilter" ]]; then + + # Create a temp directory to store the common ancestor commit + TMP_DIR=$(mktemp -d) + if [[ ! "$TMP_DIR" || ! -d "$TMP_DIR" ]]; then + echo "Could not create temp dir" + exit 1 + fi + + # Create a temp file to store the diff we need + DIFF_FILE=$(mktemp) + if [[ ! "$DIFF_FILE" || ! -f "$DIFF_FILE" ]]; then + echo "Could not create temp diff file" + exit 1 + fi + + # Create a blank file to use as a comparison when a file is missing because either it's new or been deleted + BLANK_FILE=$(mktemp) + if [[ ! "$BLANK_FILE" || ! 
-f "$BLANK_FILE" ]]; then + echo "Could not create blank file" + exit 1 + fi + + # Clean up on exit and delete all the temp files we just created + function cleanup() { + rm -rf "$TMP_DIR" + rm -f "$DIFF_FILE" + rm -f "$BLANK_FILE" + } + trap cleanup EXIT + + # Clone the repo into the temp directory + git clone -q "$WORK_DIR" "$TMP_DIR" + + # Change our temp directory to be a clean copy of the common ancestor commit + pushd "$TMP_DIR" > /dev/null + git reset -q --hard HEAD + git checkout -q "$(git merge-base "$BASE_REF" HEAD)" + TRACKED_FILES_FROM_MAIN=$(git ls-files) + popd > /dev/null + + # Create a list of files that we care about + MODIFICATIONS_IN_WORKING_DIR=$(git status --short | awk '{print $2}') + TRACKED_FILES_IN_WORKING_DIR=$(git ls-files) + ALL_FILES=$(echo "$TRACKED_FILES_IN_WORKING_DIR $MODIFICATIONS_IN_WORKING_DIR $TRACKED_FILES_FROM_MAIN" | tr ' ' '\n' | sort -u) + + # Create a diff of the changes in the working directory against the common ancestor of the current branch and main + for file in $ALL_FILES; do + # If the original file doesn't exist, use a blank file instead + # (This means it was a new file that was added in the current version of the code base) + ORIGINAL_FILE="$TMP_DIR/$file" + if [[ ! -f "$ORIGINAL_FILE" ]]; then + ORIGINAL_FILE="$BLANK_FILE" + fi + + # If the updated file doesn't exist, use a blank file instead + # (This means the file was deleted in the current version of the code base) + UPDATED_FILE="$WORK_DIR/$file" + if [[ ! 
-f "$UPDATED_FILE" ]]; then + UPDATED_FILE="$BLANK_FILE" + fi + + # Run git diff between the original file and the updated file + # Replace the file paths in the diff to match the relative path in the working directory + # Then write the diff into our diff file + git diff "$ORIGINAL_FILE" "$UPDATED_FILE" | sed "s|$ORIGINAL_FILE|/$file|g" | sed "s|$UPDATED_FILE|/$file|g" >> "$DIFF_FILE" || true # Suppress the exit code + done + + if [[ "$DIFF_ONLY" == "true" ]]; then + cat "$DIFF_FILE" + exit 0 + fi + fi + + +############################################################################################################################## +# Step 4: Run review dog using the diff we just created, allowing reviewdog to only show errors from changes we've made # +############################################################################################################################## + + if [[ "$FILTER_MODE" == "nofilter" ]]; then + reviewdog -filter-mode=nofilter + else + reviewdog -filter-mode="$FILTER_MODE" -diff="cat $DIFF_FILE" + fi diff --git a/cli/cmd/encore/app.go b/cli/cmd/encore/app.go deleted file mode 100644 index 3124deec05..0000000000 --- a/cli/cmd/encore/app.go +++ /dev/null @@ -1,628 +0,0 @@ -package main - -import ( - "archive/tar" - "bytes" - "compress/gzip" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "io/fs" - "io/ioutil" - "net/http" - "net/url" - "os" - "os/exec" - "path" - "path/filepath" - "strings" - "time" - - "encr.dev/cli/internal/conf" - "github.com/AlecAivazis/survey/v2" - "github.com/briandowns/spinner" - "github.com/fatih/color" - "github.com/spf13/cobra" - "github.com/tailscale/hujson" -) - -func init() { - appCmd := &cobra.Command{ - Use: "app", - Short: "Commands to create and link Encore apps", - } - rootCmd.AddCommand(appCmd) - - var createAppTemplate string - - createAppCmd := &cobra.Command{ - Use: "create [name]", - Short: "Create a new Encore app", - Args: cobra.MaximumNArgs(1), - Run: func(cmd *cobra.Command, args 
[]string) { - name := "" - if len(args) > 0 { - name = args[0] - } - if err := createApp(context.Background(), name, createAppTemplate); err != nil { - fatal(err) - } - }, - } - appCmd.AddCommand(createAppCmd) - createAppCmd.Flags().StringVar(&createAppTemplate, "example", "", "URL to example code to use.") - - var forceLink bool - linkAppCmd := &cobra.Command{ - Use: "link [app-id]", - Short: "Link an Encore app with the server", - Args: cobra.MaximumNArgs(1), - Run: func(cmd *cobra.Command, args []string) { - var appID string - if len(args) > 0 { - appID = args[0] - } - linkApp(appID, forceLink) - }, - } - appCmd.AddCommand(linkAppCmd) - linkAppCmd.Flags().BoolVarP(&forceLink, "force", "f", false, "Force link even if the app is already linked.") - - cloneAppCmd := &cobra.Command{ - Use: "clone [app-id] [directory]", - Short: "Clone an Encore app to your computer", - Args: cobra.MinimumNArgs(1), - Run: func(c *cobra.Command, args []string) { - cmdArgs := append([]string{"clone", "encore://" + args[0]}, args[1:]...) - cmd := exec.Command("git", cmdArgs...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - os.Exit(1) - } - }, - } - appCmd.AddCommand(cloneAppCmd) -} - -// createApp is the implementation of the "encore app create" command. -func createApp(ctx context.Context, name, template string) (err error) { - yellow := color.New(color.FgYellow) - cyan := color.New(color.FgCyan) - green := color.New(color.FgGreen) - - if _, err := conf.CurrentUser(); errors.Is(err, fs.ErrNotExist) { - var loginNow bool - cyan.Fprintln(os.Stderr, "You are not logged in to encore.dev.") - survey.AskOne(&survey.Confirm{ - Message: cyan.Sprint("Log in now to automatically link your app with encore.dev?"), - Default: true, - }, &loginNow) - if loginNow { - if err := doLogin(); err != nil { - fatal(err) - } - } else { - yellow.Fprintln(os.Stderr, "Continuing without logging in. 
You can manually link your app later using `encore app link`.") - } - } - - if name == "" { - survey.AskOne(&survey.Input{ - Message: "App Name (lowercase letters, digits, and dashes)", - }, &name, survey.WithValidator(func(in interface{}) error { return validateName(in.(string)) })) - } - - if err := validateName(name); err != nil { - return err - } else if _, err := os.Stat(name); err == nil { - return fmt.Errorf("directory %s already exists", name) - } - - if template == "" { - var idx int - - dockerMsg := "" - if _, err := exec.LookPath("docker"); err != nil { - dockerMsg = " [requires Docker]" - } - prompt := &survey.Select{ - Message: "Select app template:", - Options: []string{ - "Trello clone" + dockerMsg, - "Hello World", - "Empty app", - }, - } - survey.AskOne(prompt, &idx) - switch idx { - case 0: - template = "trello-clone" - case 1: - template = "hello-world" - case 2: - template = "" - } - } - - // Parse template information, if provided. - var ex *repoInfo - if template != "" { - var err error - ex, err = parseTemplate(ctx, template) - if err != nil { - return err - } - } - - if err := os.Mkdir(name, 0755); err != nil { - return err - } - defer func() { - if err != nil { - // Clean up the directory we just created in case of an error. 
- os.RemoveAll(name) - } - }() - - if ex != nil { - s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) - s.Prefix = fmt.Sprintf("Downloading template %s ", ex.Name) - s.Start() - err := downloadAndExtractTemplate(ctx, name, *ex) - s.Stop() - fmt.Println() - - if err != nil { - return fmt.Errorf("failed to download template %s: %v", ex.Name, err) - } - gray := color.New(color.Faint) - gray.Printf("Downloaded template %s.\n", ex.Name) - } else { - // Set up files that we need when we don't have an example - if err := ioutil.WriteFile(filepath.Join(name, ".gitignore"), []byte("/.encore\n"), 0644); err != nil { - fatal(err) - } - encoreModData := []byte("module encore.app\n") - if err := ioutil.WriteFile(filepath.Join(name, "go.mod"), encoreModData, 0644); err != nil { - fatal(err) - } - } - - // Create the app on the server. - _, err = conf.CurrentUser() - loggedIn := err == nil - - var app *appConf - if loggedIn { - s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) - s.Prefix = "Creating app on encore.dev " - s.Start() - app, err = createAppOnServer(name) - s.Stop() - if err != nil { - return fmt.Errorf("creating app on encore.dev: %v", err) - } - } - - // Create the encore.app file - var encoreAppData []byte - if loggedIn { - encoreAppData = []byte(`{ - "id": "` + app.Slug + `", -} -`) - } else { - encoreAppData = []byte(`{ - // The app is not currently linked to the encore.dev platform. - // Use "encore app link" to link it. 
- "id": "", -} -`) - } - if err := ioutil.WriteFile(filepath.Join(name, "encore.app"), encoreAppData, 0644); err != nil { - return err - } - - if err := initGitRepo(name, app); err != nil { - return err - } - - green.Printf("\nSuccessfully created app %s!\n", name) - cyanf := cyan.SprintfFunc() - if app != nil { - fmt.Printf("App ID: %s\n", cyanf(app.Slug)) - fmt.Printf("Web URL: %s%s", cyanf("https://app.encore.dev/"+app.Slug), newline) - } - - fmt.Print("\nUseful commands:\n\n") - - cyan.Printf(" encore run\n") - fmt.Print(" Run your app locally\n\n") - - cyan.Printf(" encore test ./...\n") - fmt.Print(" Run tests\n\n") - - if app != nil { - cyan.Printf(" git push encore\n") - fmt.Print(" Deploys your app\n\n") - } - - greenBoldF := green.Add(color.Bold).SprintfFunc() - fmt.Printf("Get started now: %s\n", greenBoldF("cd %s && encore run", name)) - - return nil -} - -func validateName(name string) error { - ln := len(name) - if ln == 0 { - return fmt.Errorf("name must not be empty") - } else if ln > 50 { - return fmt.Errorf("name too long (max 50 chars)") - } - - for i, s := range name { - // Outside of [a-z], [0-9] and != '-'? 
- if !((s >= 'a' && s <= 'z') || (s >= '0' && s <= '9') || s == '-') { - return fmt.Errorf("name must only contain lowercase letters, digits, or dashes") - } else if s == '-' { - if i == 0 { - return fmt.Errorf("name cannot start with a dash") - } else if (i + 1) == ln { - return fmt.Errorf("name cannot end with a dash") - } else if name[i-1] == '-' { - return fmt.Errorf("name cannot contain repeated dashes") - } - } - } - return nil -} - -type appConf struct { - Slug string `json:"slug"` - DefaultBranch *string `json:"main_branch"` -} - -func createAppOnServer(name string) (*appConf, error) { - if _, err := conf.CurrentUser(); err != nil { - return nil, err - } - - url := "https://api.encore.dev/apps" - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - defer cancel() - reqData, _ := json.Marshal(map[string]string{"Name": name}) - req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(reqData)) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", "application/json") - var respData struct { - Data appConf - } - err = slurpJSON(req, &respData) - return &respData.Data, err -} - -func validateAppID(id string) (ok bool, err error) { - if _, err := conf.CurrentUser(); errors.Is(err, fs.ErrNotExist) { - fatal("not logged in. 
Run 'encore auth login' first.") - } else if err != nil { - return false, err - } - - url := "https://api.encore.dev/apps/" + url.PathEscape(id) - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - defer cancel() - req, err := http.NewRequestWithContext(ctx, "POST", url, nil) - if err != nil { - return false, err - } - req.Header.Set("Content-Type", "application/json") - - client := conf.AuthClient() - resp, err := client.Do(req) - if err != nil { - return false, err - } - defer resp.Body.Close() - switch resp.StatusCode { - case 200: - return true, nil - case 404: - return false, nil - default: - body, _ := ioutil.ReadAll(resp.Body) - return false, fmt.Errorf("server responded with HTTP %s: %s", resp.Status, body) - } -} - -type repoInfo struct { - Owner string - Repo string - Branch string - Path string // subdirectory to copy ("." for whole project) - Name string // example name -} - -func parseTemplate(ctx context.Context, tmpl string) (*repoInfo, error) { - switch { - case strings.HasPrefix(tmpl, "http"): - // Already an URL; do nothing - case strings.HasPrefix(tmpl, "github.com"): - // Assume a URL without the scheme - tmpl = "https://" + tmpl - default: - // Simple template name - tmpl = "https://github.com/encoredev/examples/tree/main/" + tmpl - } - - u, err := url.Parse(tmpl) - if err != nil { - return nil, fmt.Errorf("invalid template: %v", err) - } - if u.Host != "github.com" { - return nil, fmt.Errorf("template must be hosted on GitHub, not %s", u.Host) - } - // Path must be one of: - // "/owner/repo" - // "/owner/repo/tree/" - // "/owner/repo/tree//path" - parts := strings.SplitN(u.Path, "/", 6) - switch { - case len(parts) == 3: // "/owner/repo" - owner, repo := parts[1], parts[2] - // Check the default branch - var resp struct { - DefaultBranch string `json:"default_branch"` - } - url := fmt.Sprintf("https://api.github.com/repos/%s/%s", owner, repo) - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil 
{ - return nil, err - } else if err := slurpJSON(req, &resp); err != nil { - return nil, err - } - return &repoInfo{ - Owner: owner, - Repo: repo, - Branch: resp.DefaultBranch, - Path: ".", - Name: repo, - }, nil - case len(parts) >= 5: // "/owner/repo" - owner, repo, t, branch := parts[1], parts[2], parts[3], parts[4] - p := "." - name := repo - if len(parts) == 6 { - p = parts[5] - name = path.Base(p) - } - if t != "tree" { - return nil, fmt.Errorf("unsupported template url: %s", tmpl) - } - return &repoInfo{ - Owner: owner, - Repo: repo, - Branch: branch, - Path: p, - Name: name, - }, nil - default: - return nil, fmt.Errorf("unsupported template url: %s", tmpl) - } -} - -func downloadAndExtractTemplate(ctx context.Context, dst string, info repoInfo) error { - url := fmt.Sprintf("https://codeload.github.com/%s/%s/tar.gz/%s", info.Owner, info.Repo, info.Branch) - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return err - } - resp, err := http.DefaultClient.Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - if resp.StatusCode != 200 { - return fmt.Errorf("GET %s: got non-200 response: %s", url, resp.Status) - } - gz, err := gzip.NewReader(resp.Body) - if err != nil { - return fmt.Errorf("could not read gzip response: %v", err) - } - defer gz.Close() - tr := tar.NewReader(gz) - - prefix := path.Join(info.Repo+"-"+info.Branch, info.Path) - prefix += "/" - files := 0 - for { - hdr, err := tr.Next() - if err == io.EOF { - if files == 0 { - return fmt.Errorf("could not find template") - } - return nil - } else if err != nil { - return fmt.Errorf("reading repo data: %v", err) - } - if hdr.FileInfo().IsDir() { - continue - } - if p := path.Clean(hdr.Name); strings.HasPrefix(p, prefix) { - files++ - p = p[len(prefix):] - filePath := filepath.Join(dst, filepath.FromSlash(p)) - if err := createFile(tr, filePath); err != nil { - return fmt.Errorf("create %s: %v", p, err) - } - } - } -} - -func createFile(src io.Reader, 
dst string) error { - if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { - return err - } - f, err := os.OpenFile(dst, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0644) - if err != nil { - return err - } - _, err = io.Copy(f, src) - if err2 := f.Close(); err == nil { - err = err2 - } - return err -} - -func slurpJSON(req *http.Request, respData interface{}) error { - resp, err := conf.AuthClient().Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - if resp.StatusCode != 200 { - body, _ := ioutil.ReadAll(resp.Body) - return fmt.Errorf("got non-200 response: %s: %s", resp.Status, body) - } - if err := json.NewDecoder(resp.Body).Decode(respData); err != nil { - return fmt.Errorf("could not decode response: %v", err) - } - return nil -} - -// initGitRepo initializes the git repo. -// If app is not nil, it configures the repo to push to the given app. -// If git does not exist, it reports an error matching exec.ErrNotFound. -func initGitRepo(path string, app *appConf) (err error) { - defer func() { - if e := recover(); e != nil { - if ee, ok := e.(error); ok { - err = ee - } else { - panic(e) - } - } - }() - - git := func(args ...string) []byte { - cmd := exec.Command("git", args...) - cmd.Dir = path - out, err := cmd.CombinedOutput() - if err != nil && err != exec.ErrNotFound { - panic(fmt.Errorf("git %s: %s (%w)", strings.Join(args, " "), out, err)) - } - return out - } - - // Initialize git repo - git("init") - if app != nil && app.DefaultBranch != nil { - git("checkout", "-b", *app.DefaultBranch) - } - git("config", "--local", "push.default", "current") - git("add", "-A") - - cmd := exec.Command("git", "commit", "-m", "Initial commit") - cmd.Dir = path - // Configure the committer if the user hasn't done it themselves yet. 
- if ok, _ := gitUserConfigured(); !ok { - cmd.Env = append(os.Environ(), - "GIT_AUTHOR_NAME=Encore", - "GIT_AUTHOR_EMAIL=git-bot@encore.dev", - "GIT_COMMITTER_NAME=Encore", - "GIT_COMMITTER_EMAIL=git-bot@encore.dev", - ) - } - if out, err := cmd.CombinedOutput(); err != nil && err != exec.ErrNotFound { - return fmt.Errorf("create initial commit repository: %s (%v)", out, err) - } - - if app != nil { - git("remote", "add", "encore", "encore://"+app.Slug) - } - - return nil -} - -func addEncoreRemote(root, appID string) { - // Determine if there are any remotes - cmd := exec.Command("git", "remote") - cmd.Dir = root - out, err := cmd.CombinedOutput() - if err != nil { - return - } - out = bytes.TrimSpace(out) - if len(out) == 0 { - cmd = exec.Command("git", "remote", "add", "encore", "encore://"+appID) - cmd.Dir = root - if err := cmd.Run(); err == nil { - fmt.Println("Configured git remote 'encore' to push/pull with Encore.") - } - } -} - -func linkApp(appID string, force bool) { - root, _ := determineAppRoot() - filePath := filepath.Join(root, "encore.app") - - // Parse the app data using a map so we preserve all - // the keys present when writing it back below. 
- var appData map[string]interface{} - if data, err := ioutil.ReadFile(filePath); err != nil { - fatal(err) - os.Exit(1) - } else if err := hujson.Unmarshal(data, &appData); err != nil { - fatal("could not parse encore.app: ", err) - os.Exit(1) - } else if appData["id"] != nil && appData["id"] != "" { - fatal("the app is already linked.\n\nNote: to link to a different app, specify the --force flag.") - } - - if appID == "" { - fmt.Println("Make sure the app is created on app.encore.dev, and then enter its ID to link it.") - fmt.Print("App ID: ") - if _, err := fmt.Scanln(&appID); err != nil { - fatal(err) - } else if appID == "" { - fatal("no app id given.") - } - } - - if linked, err := validateAppID(appID); err != nil { - fatal(err) - } else if !linked { - fmt.Fprintln(os.Stderr, "Error: that app does not exist, or you don't have access to it.") - os.Exit(1) - } - - appData["id"] = appID - data, _ := hujson.MarshalIndent(appData, "", " ") - if err := ioutil.WriteFile(filePath, data, 0644); err != nil { - fatal(err) - os.Exit(1) - } - - addEncoreRemote(root, appID) - fmt.Println("Successfully linked app!") -} - -// gitUserConfigured reports whether the user has configured -// user.name and user.email in git. -func gitUserConfigured() (bool, error) { - for _, s := range []string{"user.name", "user.email"} { - out, err := exec.Command("git", "config", s).CombinedOutput() - if err != nil { - return false, err - } else if len(bytes.TrimSpace(out)) == 0 { - return false, nil - } - } - return true, nil -} diff --git a/cli/cmd/encore/app/app.go b/cli/cmd/encore/app/app.go new file mode 100644 index 0000000000..cd4969445b --- /dev/null +++ b/cli/cmd/encore/app/app.go @@ -0,0 +1,23 @@ +package app + +import ( + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/root" +) + +// These can be overwritten using +// `go build -ldflags "-X encr.dev/cli/cmd/encore/app.defaultGitRemoteName=encore"`. 
+var ( + defaultGitRemoteName = "encore" + defaultGitRemoteURL = "encore://" +) + +var appCmd = &cobra.Command{ + Use: "app", + Short: "Commands to create and link Encore apps", +} + +func init() { + root.Cmd.AddCommand(appCmd) +} diff --git a/cli/cmd/encore/app/clone.go b/cli/cmd/encore/app/clone.go new file mode 100644 index 0000000000..ceef94b513 --- /dev/null +++ b/cli/cmd/encore/app/clone.go @@ -0,0 +1,42 @@ +package app + +import ( + "os" + "os/exec" + + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" +) + +var cloneAppCmd = &cobra.Command{ + Use: "clone [app-id] [directory]", + Short: "Clone an Encore app to your computer", + Args: cobra.MinimumNArgs(1), + + DisableFlagsInUseLine: true, + Run: func(c *cobra.Command, args []string) { + cmdArgs := append([]string{"clone", "--origin", defaultGitRemoteName, defaultGitRemoteURL + args[0]}, args[1:]...) + cmd := exec.Command("git", cmdArgs...) + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + os.Exit(1) + } + }, + ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + switch len(args) { + case 0: + return cmdutil.AutoCompleteAppSlug(cmd, args, toComplete) + case 1: + return nil, cobra.ShellCompDirectiveFilterDirs + default: + return nil, cobra.ShellCompDirectiveDefault + } + }, +} + +func init() { + appCmd.AddCommand(cloneAppCmd) +} diff --git a/cli/cmd/encore/app/create.go b/cli/cmd/encore/app/create.go new file mode 100644 index 0000000000..a38a31aba7 --- /dev/null +++ b/cli/cmd/encore/app/create.go @@ -0,0 +1,696 @@ +package app + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io/fs" + "os" + "os/exec" + "path/filepath" + "strings" + "time" + + "github.com/briandowns/spinner" + "github.com/cockroachdb/errors" + "github.com/fatih/color" + "github.com/spf13/cobra" + "github.com/tailscale/hujson" + + "encr.dev/cli/cmd/encore/auth" + 
"encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/platform" + "encr.dev/cli/internal/telemetry" + "encr.dev/internal/conf" + "encr.dev/internal/env" + "encr.dev/internal/version" + "encr.dev/pkg/github" + "encr.dev/pkg/xos" + daemonpb "encr.dev/proto/encore/daemon" +) + +var ( + createAppTemplate string + createAppOnPlatform bool +) + +var createAppCmd = &cobra.Command{ + Use: "create [name]", + Short: "Create a new Encore app", + Args: cobra.MaximumNArgs(1), + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + name := "" + if len(args) > 0 { + name = args[0] + } + if err := createApp(context.Background(), name, createAppTemplate); err != nil { + cmdutil.Fatal(err) + } + }, +} + +func init() { + appCmd.AddCommand(createAppCmd) + createAppCmd.Flags().BoolVar(&createAppOnPlatform, "platform", true, "whether to create the app with the Encore Platform") + createAppCmd.Flags().StringVar(&createAppTemplate, "example", "", "URL to example code to use.") +} + +func promptAccountCreation() { + cyan := color.New(color.FgCyan) + red := color.New(color.FgRed) + // Prompt the user for creating an account if they're not logged in. + if _, err := conf.CurrentUser(); errors.Is(err, fs.ErrNotExist) && createAppOnPlatform { + PromptLoop: + for { + _, _ = cyan.Fprint(os.Stderr, "Log in / Sign up for a free Encore Cloud account to enable automated cloud deployments? (Y/n): ") + var input string + _, _ = fmt.Scanln(&input) + input = strings.TrimSpace(input) + switch input { + case "Y", "y", "yes", "": + telemetry.Send("app.create.account", map[string]any{"response": true}) + if err := auth.DoLogin(auth.AutoFlow); err != nil { + cmdutil.Fatal(err) + } + case "N", "n", "no": + telemetry.Send("app.create.account", map[string]any{"response": false}) + // Continue without creating an account. + case "q", "quit", "exit": + os.Exit(1) + default: + // Try again. 
+ _, _ = red.Fprintln(os.Stderr, "Unexpected answer, please enter 'y' or 'n'.") + continue PromptLoop + } + break + } + } +} + +func promptRunApp() bool { + cyan := color.New(color.FgCyan) + red := color.New(color.FgRed) + for { + _, _ = cyan.Fprint(os.Stderr, "Run your app now? (Y/n): ") + var input string + _, _ = fmt.Scanln(&input) + input = strings.TrimSpace(input) + switch input { + case "Y", "y", "yes", "": + telemetry.Send("app.create.run", map[string]any{"response": true}) + return true + case "N", "n", "no": + telemetry.Send("app.create.run", map[string]any{"response": false}) + return false + case "q", "quit", "exit": + telemetry.Send("app.create.run", map[string]any{"response": false}) + return false + default: + // Try again. + _, _ = red.Fprintln(os.Stderr, "Unexpected answer, please enter 'y' or 'n'.") + } + } +} + +// createApp is the implementation of the "encore app create" command. +func createApp(ctx context.Context, name, template string) (err error) { + var lang language + defer func() { + // We need to send the telemetry synchronously to ensure it's sent before the command exits. + telemetry.SendSync("app.create", map[string]any{ + "template": template, + "lang": lang, + "error": err != nil, + }) + }() + cyan := color.New(color.FgCyan) + green := color.New(color.FgGreen) + + promptAccountCreation() + + if name == "" || template == "" { + name, template, lang = selectTemplate(name, template, false) + } + // Treat the special name "empty" as the empty app template + // (the rest of the code assumes that's the empty string). + if template == "empty" { + template = "" + } + + if err := validateName(name); err != nil { + return err + } else if _, err := os.Stat(name); err == nil { + return fmt.Errorf("directory %s already exists", name) + } + + // Parse template information, if provided. 
+ var ex *github.Tree + if template != "" { + var err error + ex, err = parseTemplate(ctx, template) + if err != nil { + return err + } + } + + if err := os.Mkdir(name, 0755); err != nil { + return err + } + defer func() { + if err != nil { + // Clean up the directory we just created in case of an error. + _ = os.RemoveAll(name) + } + }() + + if ex != nil { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = fmt.Sprintf("Downloading template %s ", ex.Name()) + s.Start() + err := github.ExtractTree(ctx, ex, name) + s.Stop() + fmt.Println() + + if err != nil { + return fmt.Errorf("failed to download template %s: %v", ex.Name(), err) + } + gray := color.New(color.Faint) + _, _ = gray.Printf("Downloaded template %s.\n", ex.Name()) + } else { + // Set up files that we need when we don't have an example + if err := xos.WriteFile(filepath.Join(name, ".gitignore"), []byte("/.encore\n"), 0644); err != nil { + cmdutil.Fatal(err) + } + encoreModData := []byte("module encore.app\n") + if err := xos.WriteFile(filepath.Join(name, "go.mod"), encoreModData, 0644); err != nil { + cmdutil.Fatal(err) + } + } + + _, err = conf.CurrentUser() + loggedIn := err == nil + + exCfg, err := parseExampleConfig(name) + if err != nil { + return fmt.Errorf("failed to parse example config: %v", err) + } + + // Delete the example config file. 
+ _ = os.Remove(exampleJSONPath(name)) + + var app *platform.App + if loggedIn && createAppOnPlatform { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = "Creating app on encore.dev " + s.Start() + app, err = createAppOnServer(name, exCfg) + s.Stop() + if err != nil { + return fmt.Errorf("creating app on encore.dev: %v", err) + } + } + + appRootRelpath := filepath.FromSlash(exCfg.EncoreAppPath) + encoreAppPath := filepath.Join(name, appRootRelpath, "encore.app") + appData, err := os.ReadFile(encoreAppPath) + if err != nil { + appData, err = []byte("{}"), nil + } + + if app != nil { + appData, err = setEncoreAppID(appData, app.Slug, []string{}) + } else { + appData, err = setEncoreAppID(appData, "", []string{ + "The app is not currently linked to the encore.dev platform.", + `Use "encore app link" to link it.`, + }) + } + if err != nil { + return errors.Wrap(err, "write encore.app file") + } + if err := xos.WriteFile(encoreAppPath, appData, 0644); err != nil { + return errors.Wrap(err, "write encore.app file") + } + + // Update to latest encore.dev release + if _, err := os.Stat(filepath.Join(name, appRootRelpath, "go.mod")); err == nil { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = "Running go get encore.dev@latest" + s.Start() + if err := gogetEncore(filepath.Join(name, appRootRelpath)); err != nil { + s.FinalMSG = fmt.Sprintf("failed, skipping: %v", err.Error()) + } + s.Stop() + } else if _, err := os.Stat(filepath.Join(name, appRootRelpath, "package.json")); err == nil { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = "Running npm install encore.dev@latest" + s.Start() + if err := npmInstallEncore(filepath.Join(name, appRootRelpath)); err != nil { + s.FinalMSG = fmt.Sprintf("failed, skipping: %v", err.Error()) + } + s.Stop() + } + + // Rewrite any existence of ENCORE_APP_ID to the allocated app id. 
+ if app != nil { + if err := rewritePlaceholders(name, app); err != nil { + red := color.New(color.FgRed) + _, _ = red.Printf("Failed rewriting source code placeholders, skipping: %v\n", err) + } + } + + if err := initGitRepo(name, app); err != nil { + return err + } + + // Try to generate wrappers. Don't error out if it fails for some reason, + // it's a nice-to-have to avoid IDEs thinking there are compile errors before 'encore run' runs. + _ = generateWrappers(filepath.Join(name, appRootRelpath)) + + // Create the app on the daemon. + appRoot, err := filepath.Abs(filepath.Join(name, appRootRelpath)) + if err != nil { + cmdutil.Fatalf("failed to get absolute path: %v", err) + } + daemon := cmdutil.ConnectDaemon(ctx) + _, err = daemon.CreateApp(ctx, &daemonpb.CreateAppRequest{ + AppRoot: appRoot, + Tutorial: exCfg.Tutorial, + Template: template, + }) + if err != nil { + color.Red("Failed to create app on daemon: %s\n", err) + } + cmdutil.ClearTerminalExceptFirstNLines(0) + _, _ = green.Printf("Successfully created app %s!\n", name) + if app != nil { + cyanf := cyan.SprintfFunc() + fmt.Println() + fmt.Printf("App ID: %s\n", cyanf(app.Slug)) + fmt.Printf("Web URL: %s%s", cyanf("https://app.encore.cloud/"+app.Slug), cmdutil.Newline) + fmt.Printf("App Root: %s\n", cyanf(appRoot)) + fmt.Println() + } + greenBoldF := green.Add(color.Bold).SprintfFunc() + fmt.Printf("Run your app with: %s\n", greenBoldF("cd %s && encore run", filepath.Join(name, appRootRelpath))) + fmt.Println() + if promptRunApp() { + cmdutil.ClearTerminalExceptFirstNLines(0) + stream, err := daemon.Run(ctx, &daemonpb.RunRequest{ + AppRoot: appRoot, + Watch: true, + WorkingDir: ".", + Environ: os.Environ(), + ListenAddr: "127.0.0.1:4000", + Browser: daemonpb.RunRequest_BROWSER_ALWAYS, + }) + if err != nil { + cmdutil.Fatalf("failed to run app: %v", err) + } + converter := cmdutil.ConvertJSONLogs(cmdutil.Colorize(true)) + _ = cmdutil.StreamCommandOutput(stream, converter) + return nil + } + 
cmdutil.ClearTerminalExceptFirstNLines(0) + fmt.Print("Useful commands:\n\n") + + _, _ = cyan.Printf(" encore run\n") + fmt.Print(" Run your app locally\n\n") + + if detectLang(name) == languageGo { + _, _ = cyan.Printf(" encore test ./...\n") + } else { + _, _ = cyan.Printf(" encore test\n") + } + fmt.Print(" Run tests\n\n") + + if app != nil { + _, _ = cyan.Printf(" git push encore\n") + fmt.Print(" Deploys your app\n\n") + } + + fmt.Printf("Get started now: %s\n", greenBoldF("cd %s && encore run", filepath.Join(name, appRootRelpath))) + return nil +} + +// detectLang attempts to detect the application language for an Encore application +// situated at appRoot. +func detectLang(appRoot string) language { + if _, err := os.Stat(filepath.Join(appRoot, "go.mod")); err == nil { + return languageGo + } else if _, err := os.Stat(filepath.Join(appRoot, "package.json")); err == nil { + return languageTS + } + return languageGo +} + +func validateName(name string) error { + ln := len(name) + if ln == 0 { + return fmt.Errorf("name must not be empty") + } else if ln > 50 { + return fmt.Errorf("name too long (max 50 chars)") + } + + for i, s := range name { + // Outside of [a-z], [0-9] and != '-'? + if !((s >= 'a' && s <= 'z') || (s >= '0' && s <= '9') || s == '-') { + return fmt.Errorf("name must only contain lowercase letters, digits, or dashes") + } else if s == '-' { + if i == 0 { + return fmt.Errorf("name cannot start with a dash") + } else if (i + 1) == ln { + return fmt.Errorf("name cannot end with a dash") + } else if name[i-1] == '-' { + return fmt.Errorf("name cannot contain repeated dashes") + } + } + } + return nil +} + +func gogetEncore(dir string) error { + var goBinPath string + + // Prefer the 'go' binary from the Encore GOROOT if available. + if goroot, ok := env.OptEncoreGoRoot().Get(); ok { + goBinPath = filepath.Join(goroot, "bin", "go") + } else { + // Otherwise fall back to just "go", so that exec.Command + // does a path lookup. 
+ goBinPath = "go" + } + + // Use the 'go' binary from the Encore GOROOT in case the user + // does not have Go installed separately from Encore. + // nosemgrep go.lang.security.audit.dangerous-exec-command.dangerous-exec-command + cmd := exec.Command(goBinPath, "get", "encore.dev@latest") + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + return errors.Newf("go get failed: %v: %s", err, out) + } + return nil +} + +func npmInstallEncore(dir string) error { + args := []string{"install"} + if version.Channel == version.DevBuild { + args = append(args, filepath.Join(env.EncoreRuntimesPath(), "js", "encore.dev")) + } else { + args = append(args, fmt.Sprintf("encore.dev@%s", strings.TrimPrefix(version.Version, "v"))) + } + + // First install the 'encore.dev' package. + cmd := exec.Command("npm", args...) + cmd.Dir = dir + out, err := cmd.CombinedOutput() + if err != nil { + err = fmt.Errorf("installing encore.dev package failed: %v: %s", err, out) + } + + // Then run 'npm install'. + cmd = exec.Command("npm", "install") + cmd.Dir = dir + if out2, err2 := cmd.CombinedOutput(); err2 != nil && err == nil { + err = fmt.Errorf("'npm install' failed: %v: %s", err2, out2) + } + + return err +} + +func createAppOnServer(name string, cfg exampleConfig) (*platform.App, error) { + if _, err := conf.CurrentUser(); err != nil { + return nil, err + } + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + params := &platform.CreateAppParams{ + Name: name, + InitialSecrets: cfg.InitialSecrets, + AppRootDir: cfg.EncoreAppPath, + } + return platform.CreateApp(ctx, params) +} + +func parseTemplate(ctx context.Context, tmpl string) (*github.Tree, error) { + // If the template does not contain a colon or a dot, it's definitely + // not a github.com URL. Assume it's a simple template name. 
	if !strings.Contains(tmpl, ":") && !strings.Contains(tmpl, ".") {
		tmpl = "https://github.com/encoredev/examples/tree/main/" + tmpl
	}
	return github.ParseTree(ctx, tmpl)
}

// initGitRepo initializes the git repo.
// If app is not nil, it configures the repo to push to the given app.
// If git does not exist, it reports an error matching exec.ErrNotFound.
func initGitRepo(path string, app *platform.App) (err error) {
	// The git helper below reports failures by panicking; this deferred
	// recover converts such a panic back into the named return error.
	defer func() {
		if e := recover(); e != nil {
			if ee, ok := e.(error); ok {
				err = ee
			} else {
				panic(e)
			}
		}
	}()

	// git runs a git command inside the repo directory. A missing git
	// binary (exec.ErrNotFound) is tolerated; any other failure panics
	// with the command's combined output (recovered above).
	git := func(args ...string) []byte {
		cmd := exec.Command("git", args...)
		cmd.Dir = path
		out, err := cmd.CombinedOutput()
		if err != nil && !errors.Is(err, exec.ErrNotFound) {
			panic(fmt.Errorf("git %s: %s (%w)", strings.Join(args, " "), out, err))
		}
		return out
	}

	// Initialize git repo
	git("init")
	if app != nil && app.MainBranch != nil {
		// Match the branch name configured for the app on the platform.
		git("checkout", "-b", *app.MainBranch)
	}
	git("config", "--local", "push.default", "current")
	git("add", "-A")

	cmd := exec.Command("git", "commit", "-m", "Initial commit")
	cmd.Dir = path
	// Configure the committer if the user hasn't done it themselves yet.
	if ok, _ := gitUserConfigured(); !ok {
		cmd.Env = append(os.Environ(),
			"GIT_AUTHOR_NAME=Encore",
			"GIT_AUTHOR_EMAIL=git-bot@encore.dev",
			"GIT_COMMITTER_NAME=Encore",
			"GIT_COMMITTER_EMAIL=git-bot@encore.dev",
		)
	}
	// Like the git helper above, a missing git binary is not an error here.
	if out, err := cmd.CombinedOutput(); err != nil && !errors.Is(err, exec.ErrNotFound) {
		return fmt.Errorf("create initial commit repository: %s (%v)", out, err)
	}

	if app != nil {
		// Point the 'encore' remote at the linked app so 'git push encore' works.
		git("remote", "add", defaultGitRemoteName, defaultGitRemoteURL+app.Slug)
	}

	return nil
}

// addEncoreRemote adds an 'encore' git remote for the repo at root pointing
// at the given app, but only when the repo has no remotes yet.
// All failures are silently ignored (best-effort).
func addEncoreRemote(root, appID string) {
	// Determine if there are any remotes
	cmd := exec.Command("git", "remote")
	cmd.Dir = root
	out, err := cmd.CombinedOutput()
	if err != nil {
		return
	}
	out = bytes.TrimSpace(out)
	if len(out) == 0 {
		cmd = exec.Command("git", "remote", "add", defaultGitRemoteName, defaultGitRemoteURL+appID)
		cmd.Dir = root
		if err := cmd.Run(); err == nil {
			fmt.Println("Configured git remote 'encore' to push/pull with Encore.")
		}
	}
}

// gitUserConfigured reports whether the user has configured
// user.name and user.email in git.
func gitUserConfigured() (bool, error) {
	for _, s := range []string{"user.name", "user.email"} {
		out, err := exec.Command("git", "config", s).CombinedOutput()
		if err != nil {
			return false, err
		} else if len(bytes.TrimSpace(out)) == 0 {
			// Key present but empty: treat as not configured.
			return false, nil
		}
	}
	return true, nil
}

// rewritePlaceholders recursively rewrites all files within basePath
// to replace placeholders with the actual values for this particular app.
+func rewritePlaceholders(basePath string, app *platform.App) error { + var first error + err := filepath.WalkDir(basePath, func(path string, info fs.DirEntry, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + if err := rewritePlaceholder(path, info, app); err != nil { + if first == nil { + first = err + } + } + return nil + }) + if err == nil { + err = first + } + return err +} + +// rewritePlaceholder rewrites a file to replace placeholders with the +// actual values for this particular app. If the file contains none of +// the placeholders, this is a no-op. +func rewritePlaceholder(path string, info fs.DirEntry, app *platform.App) error { + data, err := os.ReadFile(path) + if err != nil { + return err + } + placeholders := []string{ + "{{ENCORE_APP_ID}}", app.Slug, + } + + var replaced bool + for i := 0; i < len(placeholders); i += 2 { + placeholder := []byte(placeholders[i]) + target := []byte(placeholders[i+1]) + if bytes.Contains(data, placeholder) { + data = bytes.ReplaceAll(data, placeholder, target) + replaced = true + } + } + + if replaced { + return xos.WriteFile(path, data, info.Type().Perm()) + } + return nil +} + +// exampleConfig is the optional configuration file for example apps. +type exampleConfig struct { + // Relative path to the directory where the `encore.app` should be located. + // Defaults to ".". 
	EncoreAppPath string `json:"encore_app_path"`

	// InitialSecrets are secret values to set when the app is created
	// on the platform (forwarded via createAppOnServer).
	InitialSecrets map[string]string `json:"initial_secrets"`
	// NOTE(review): Tutorial appears to mark tutorial-style examples;
	// its consumer is not visible in this file — verify before relying on it.
	Tutorial bool `json:"tutorial"`
}

// parseExampleConfig reads the optional example configuration file from the
// cloned example repository at repoPath. A missing file is not an error:
// the defaults (EncoreAppPath ".") are returned instead. The file may use
// HuJSON syntax (comments, trailing commas) and is standardized before decoding.
func parseExampleConfig(repoPath string) (cfg exampleConfig, err error) {
	baseConfig := exampleConfig{
		EncoreAppPath: ".",
	}
	data, err := os.ReadFile(exampleJSONPath(repoPath))
	if errors.Is(err, fs.ErrNotExist) {
		return baseConfig, nil
	} else if err != nil {
		return baseConfig, err
	}

	// Convert HuJSON to standard JSON so encoding/json can decode it.
	data, err = hujson.Standardize(data)
	if err != nil {
		return baseConfig, err
	} else if err := json.Unmarshal(data, &cfg); err != nil {
		return baseConfig, err
	}

	if cfg.EncoreAppPath == "" {
		cfg.EncoreAppPath = "."
	}
	// Reject absolute paths and paths that escape the repo (e.g. "../x").
	if !filepath.IsLocal(cfg.EncoreAppPath) {
		return baseConfig, errors.New("encore_app_path must be a local path")
	}
	return cfg, nil
}

// exampleJSONPath returns the path of the example setup file within repoPath.
func exampleJSONPath(repoPath string) string {
	return filepath.Join(repoPath, "example-initial-setup.json")
}

// setEncoreAppID rewrites the encore.app file to replace the app id, preserving comments.
// It optionally adds comment lines before the "id" field if commentLines is not nil.
func setEncoreAppID(data []byte, id string, commentLines []string) ([]byte, error) {
	// Treat an empty file as an empty JSON object.
	if len(data) == 0 {
		data = []byte("{}")
	}

	root, err := hujson.Parse(data)
	if err != nil {
		return data, errors.Wrap(err, "parse encore.app")
	}
	obj, ok := root.Value.(*hujson.Object)
	if !ok {
		return data, errors.New("invalid encore.app format: not a json object")
	}

	// Render the comment lines as HuJSON "extra" bytes to be placed
	// immediately before the "id" member name.
	var buf bytes.Buffer
	for i, ln := range commentLines {
		if i == 0 {
			fmt.Fprintf(&buf, "\n")
		}
		fmt.Fprintf(&buf, "\t// %s\n", strings.TrimSpace(ln))
	}
	extra := hujson.Extra(buf.Bytes())
	// JSON-encode the id to obtain a properly quoted/escaped string literal.
	jsonValue, _ := json.Marshal(id)
	value := hujson.Value{
		Value: hujson.Literal(jsonValue),
	}

	// Replace the value of an existing "id" member, if any.
	// Comments are only attached when commentLines is non-nil.
	found := false
	for i := range obj.Members {
		m := &obj.Members[i]
		if lit, ok := m.Name.Value.(hujson.Literal); ok && lit.String() == "id" {
			if commentLines != nil {
				m.Name.BeforeExtra = extra
			}
			m.Value = value
			found = true
			break
		}
	}

	// Otherwise insert "id" (with its comments) as the first member.
	if !found {
		obj.Members = append([]hujson.ObjectMember{{
			Name: hujson.Value{
				BeforeExtra: extra,
				Value: hujson.Literal(`"id"`),
			},
			Value: value,
		}}, obj.Members...)
	}

	// NOTE(review): Format's result is discarded here — confirm it cannot
	// fail on a value that was just parsed and mutated in-place.
	root.Format()
	return root.Pack(), nil
}

// generateWrappers runs 'encore gen wrappers' in the given directory.
func generateWrappers(dir string) error {
	// Use this executable if we can.
+ exe, err := os.Executable() + if err != nil { + exe = "encore" + } + // nosemgrep go.lang.security.audit.dangerous-exec-command.dangerous-exec-command + cmd := exec.Command(exe, "gen", "wrappers") + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("encore gen wrappers failed: %v: %s", err, out) + } + return nil +} diff --git a/cli/cmd/encore/app/create_form.go b/cli/cmd/encore/app/create_form.go new file mode 100644 index 0000000000..2cc5667acd --- /dev/null +++ b/cli/cmd/encore/app/create_form.go @@ -0,0 +1,725 @@ +package app + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "strings" + "sync" + "time" + + "github.com/charmbracelet/bubbles/list" + "github.com/charmbracelet/bubbles/spinner" + "github.com/charmbracelet/bubbles/textinput" + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" + "github.com/tailscale/hujson" + + "encr.dev/cli/cmd/encore/cmdutil" +) + +const ( + codeBlue = "#6D89FF" + codePurple = "#A36C8C" + codeGreen = "#B3D77E" + validationFail = "#CB1010" +) + +var ( + inputStyle = lipgloss.NewStyle().Foreground(lipgloss.AdaptiveColor{Dark: codeBlue, Light: codeBlue}) + descStyle = lipgloss.NewStyle().Foreground(lipgloss.AdaptiveColor{Dark: codeGreen, Light: codePurple}) + docStyle = lipgloss.NewStyle().Padding(0, 2, 0, 2) + errorStyle = lipgloss.NewStyle().Foreground(lipgloss.Color(validationFail)) + successStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#00C200")) +) + +type templateItem struct { + ItemTitle string `json:"title"` + Desc string `json:"desc"` + Template string `json:"template"` + Lang language `json:"lang"` +} + +func (i templateItem) Title() string { return i.ItemTitle } +func (i templateItem) Description() string { return i.Desc } +func (i templateItem) FilterValue() string { return i.ItemTitle } + +type createFormModel struct { + step int // 0, 1, 2, 3 + + lang languageSelectModel + templates templateListModel + appName 
appNameModel + + skipShowingTemplate bool + + aborted bool +} + +func (m createFormModel) Init() tea.Cmd { + return tea.Batch( + m.appName.Init(), + m.templates.Init(), + ) +} + +type languageSelectDone struct { + lang language +} + +type languageSelectModel struct { + list list.Model +} + +func (m languageSelectModel) Selected() language { + sel := m.list.SelectedItem() + if sel == nil { + return "" + } + return sel.(langItem).lang +} + +func (m languageSelectModel) Update(msg tea.Msg) (languageSelectModel, tea.Cmd) { + var c tea.Cmd + + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.Type { + case tea.KeyEnter: + // Have we selected a language? + if idx := m.list.Index(); idx >= 0 { + return m, func() tea.Msg { + return languageSelectDone{ + lang: m.Selected(), + } + } + } + } + } + + m.list, c = m.list.Update(msg) + return m, c +} + +func (m *languageSelectModel) SetSize(width, height int) { + m.list.SetWidth(width) + m.list.SetHeight(max(height-1, 0)) +} + +const checkmark = "✔" + +func (m languageSelectModel) View() string { + var b strings.Builder + b.WriteString(inputStyle.Render("Select language for your application")) + b.WriteString(descStyle.Render(" [Use arrows to move]")) + b.WriteString("\n") + b.WriteString(m.list.View()) + + return b.String() +} + +type appNameDone struct{} + +type appNameModel struct { + predefined string + text textinput.Model + dirExists bool +} + +func (m appNameModel) Init() tea.Cmd { + return tea.Batch( + textinput.Blink, + ) +} + +func (m appNameModel) Selected() string { + if m.predefined != "" { + return m.predefined + } + return m.text.Value() +} + +func (m appNameModel) Update(msg tea.Msg) (appNameModel, tea.Cmd) { + var cmds []tea.Cmd + var c tea.Cmd + m.text, c = m.text.Update(msg) + cmds = append(cmds, c) + + if val := m.text.Value(); val != "" { + _, err := os.Stat(val) + m.dirExists = err == nil + } + + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.Type { + case tea.KeyEnter: + if 
m.text.Value() != "" && !m.dirExists { + cmds = append(cmds, func() tea.Msg { + return appNameDone{} + }) + } + } + } + + return m, tea.Batch(cmds...) +} + +func (m appNameModel) View() string { + var b strings.Builder + if m.text.Focused() { + b.WriteString(inputStyle.Render("App Name")) + b.WriteString(descStyle.Render(" [Use only lowercase letters, digits, and dashes]")) + b.WriteByte('\n') + b.WriteString(m.text.View()) + if m.dirExists { + b.WriteString(errorStyle.Render(" error: dir already exists")) + } + } else { + fmt.Fprintf(&b, "%s App Name: %s", checkmark, m.text.Value()) + } + b.WriteByte('\n') + return b.String() +} + +type templateListModel struct { + predefined string + filter language + + all []templateItem + list list.Model + loading spinner.Model +} + +func (m templateListModel) Init() tea.Cmd { + return tea.Batch( + loadTemplates, + m.loading.Tick, + ) +} + +func (m *templateListModel) SetSize(width, height int) { + m.list.SetWidth(width) + m.list.SetHeight(max(height-1, 0)) +} + +type templateSelectDone struct{} + +func (m templateListModel) Update(msg tea.Msg) (templateListModel, tea.Cmd) { + var cmds []tea.Cmd + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.Type { + case tea.KeyEnter: + // Have we selected a language? + if idx := m.list.Index(); idx >= 0 { + return m, func() tea.Msg { return templateSelectDone{} } + } + } + + case spinner.TickMsg: + m.loading, _ = m.loading.Update(msg) + + case loadedTemplates: + m.all = msg + m.refreshFilter() + newList, c := m.list.Update(msg) + m.list = newList + cmds = append(cmds, c) + } + + newList, c := m.list.Update(msg) + m.list = newList + cmds = append(cmds, c) + + return m, tea.Batch(cmds...) 
+} + +func (m *templateListModel) UpdateFilter(lang language) { + m.filter = lang + m.refreshFilter() +} + +func (m *templateListModel) refreshFilter() { + var listItems []list.Item + for _, it := range m.all { + if it.Lang == m.filter { + listItems = append(listItems, it) + } + } + m.list.SetItems(listItems) +} + +func (m templateListModel) View() string { + var b strings.Builder + b.WriteString(inputStyle.Render("Template")) + b.WriteString(descStyle.Render(" [Use arrows to move]")) + b.WriteByte('\n') + b.WriteString(m.list.View()) + + return b.String() +} + +func (m templateListModel) Selected() string { + if m.predefined != "" { + return m.predefined + } + idx := m.list.Index() + if idx < 0 { + return "" + } + return m.list.Items()[idx].FilterValue() +} + +func (m createFormModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + var ( + cmds []tea.Cmd + c tea.Cmd + ) + + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.Type { + case tea.KeyCtrlC, tea.KeyEsc, 'q': + m.aborted = true + return m, tea.Quit + } + + switch m.step { + case 0: + m.lang, c = m.lang.Update(msg) + cmds = append(cmds, c) + case 1: + m.templates, c = m.templates.Update(msg) + cmds = append(cmds, c) + case 2: + m.appName, c = m.appName.Update(msg) + cmds = append(cmds, c) + } + return m, tea.Batch(cmds...) + + case languageSelectDone: + m.step = 1 + if m.skipShowingTemplate { + m.step = 2 + } + m.templates.UpdateFilter(msg.lang) + + case templateSelectDone: + if m.appName.predefined != "" { + // We're done. + m.step = 3 + cmds = append(cmds, tea.Quit) + } else { + m.step = 2 + } + + case appNameDone: + cmds = append(cmds, tea.Quit) + m.step = 3 + + case tea.WindowSizeMsg: + m.SetSize(msg.Width, msg.Height) + return m, nil + } + + // Update all submodels for other messages. 
+ m.lang, c = m.lang.Update(msg) + cmds = append(cmds, c) + m.templates, c = m.templates.Update(msg) + cmds = append(cmds, c) + m.appName, c = m.appName.Update(msg) + cmds = append(cmds, c) + + return m, tea.Batch(cmds...) +} + +func (m *createFormModel) SetSize(width, height int) { + // Step 1 + doneHeight := lipgloss.Height(m.doneView()) + { + availHeight := height - doneHeight + m.lang.SetSize(width, availHeight) + } + + // Step 2 + { + availHeight := height - doneHeight + m.templates.SetSize(width, availHeight) + } +} + +func (m createFormModel) doneView() string { + var b strings.Builder + + renderDone := func(title, value string) { + b.WriteString(successStyle.Render(fmt.Sprintf("%s %s: ", checkmark, title))) + b.WriteString(value) + b.WriteByte('\n') + } + + renderLangDone := func() { + renderDone("Language", m.lang.Selected().Display()) + } + + renderNameDone := func() { + renderDone("App Name", m.appName.Selected()) + } + + renderTemplateDone := func() { + renderDone("Template", m.templates.Selected()) + } + + if m.appName.predefined != "" { + renderNameDone() + } + if m.templates.predefined == "" && m.step > 0 { + renderLangDone() + } + if !m.skipShowingTemplate { + if m.templates.predefined != "" || m.step > 1 { + renderTemplateDone() + } + } + if m.appName.predefined == "" && m.step > 2 { + renderNameDone() + } + + return b.String() +} + +func (m createFormModel) View() string { + var b strings.Builder + + doneView := m.doneView() + + b.WriteString(doneView) + if doneView != "" { + b.WriteByte('\n') + } + + if m.step == 0 { + b.WriteString(m.lang.View()) + } + + if m.step == 1 { + b.WriteString(m.templates.View()) + } + + if m.step == 2 { + b.WriteString(m.appName.View()) + } + + return docStyle.Render(b.String()) +} + +func (m templateListModel) templatesLoading() bool { + return len(m.list.Items()) == 0 +} + +func (m templateListModel) SelectedItem() (templateItem, bool) { + if m.predefined != "" { + return templateItem{}, false + } + idx := 
m.list.Index() + items := m.list.Items() + if idx >= 0 && len(items) > idx { + return items[idx].(templateItem), true + } + return templateItem{}, false +} + +func selectTemplate(inputName, inputTemplate string, skipShowingTemplate bool) (appName, template string, selectedLang language) { + // If we have both name and template already, return them. + if inputName != "" && inputTemplate != "" { + return inputName, inputTemplate, "" + } + + var lang languageSelectModel + { + ls := list.NewDefaultItemStyles() + ls.SelectedTitle = ls.SelectedTitle.Foreground(lipgloss.Color(codeBlue)).BorderForeground(lipgloss.Color(codeBlue)) + ls.SelectedDesc = ls.SelectedDesc.Foreground(lipgloss.Color(codeBlue)).BorderForeground(lipgloss.Color(codeBlue)) + del := list.NewDefaultDelegate() + del.Styles = ls + del.ShowDescription = false + del.SetSpacing(0) + + items := []list.Item{ + langItem{ + lang: languageGo, + desc: "Build performant and scalable backends with Go", + }, + langItem{ + lang: languageTS, + desc: "Build backend and full-stack applications with TypeScript", + }, + } + + ll := list.New(items, del, 0, 0) + ll.SetShowTitle(false) + ll.SetShowHelp(false) + ll.SetShowPagination(true) + ll.SetShowFilter(false) + ll.SetFilteringEnabled(false) + ll.SetShowStatusBar(false) + lang = languageSelectModel{ + list: ll, + } + lang.SetSize(0, 20) + } + + var templates templateListModel + { + ls := list.NewDefaultItemStyles() + ls.SelectedTitle = ls.SelectedTitle.Foreground(lipgloss.Color(codeBlue)).BorderForeground(lipgloss.Color(codeBlue)) + ls.SelectedDesc = ls.SelectedDesc.Foreground(lipgloss.Color(codeBlue)).BorderForeground(lipgloss.Color(codeBlue)) + del := list.NewDefaultDelegate() + del.Styles = ls + + ll := list.New(nil, del, 0, 20) + ll.SetShowTitle(false) + ll.SetShowHelp(false) + ll.SetShowPagination(true) + ll.SetShowFilter(false) + ll.SetFilteringEnabled(false) + ll.SetShowStatusBar(false) + + sp := spinner.New() + sp.Spinner = spinner.Dot + sp.Style = 
inputStyle.Copy().Inline(true) + templates = templateListModel{ + predefined: inputTemplate, + list: ll, + loading: sp, + } + } + + var nameModel appNameModel + { + text := textinput.New() + text.Focus() + text.CharLimit = 20 + text.Width = 30 + text.Validate = incrementalValidateNameInput + + nameModel = appNameModel{predefined: inputName, text: text} + } + + m := createFormModel{ + step: 0, + lang: lang, + templates: templates, + appName: nameModel, + skipShowingTemplate: skipShowingTemplate, + } + + // If we have a name, start the list without any selection. + if m.appName.predefined != "" { + m.templates.list.Select(-1) + } + if m.templates.predefined != "" { + m.step = 2 // skip to app name selection + } + + p := tea.NewProgram(m) + + result, err := p.Run() + if err != nil { + cmdutil.Fatal(err) + } + + // Validate the result. + res := result.(createFormModel) + if res.aborted { + os.Exit(1) + } + + appName, template = inputName, inputTemplate + + if appName == "" { + appName = res.appName.text.Value() + } + + if template == "" { + sel, ok := res.templates.SelectedItem() + if !ok { + cmdutil.Fatal("no template selected") + } + template = sel.Template + } + + return appName, template, res.lang.Selected() +} + +type langItem struct { + lang language + desc string +} + +func (i langItem) FilterValue() string { + return i.lang.Display() +} +func (i langItem) Title() string { + return i.FilterValue() +} +func (i langItem) Description() string { return "" } + +type language string + +const ( + languageGo language = "go" + languageTS language = "ts" +) + +func (lang language) Display() string { + switch lang { + case languageGo: + return "Go" + case languageTS: + return "TypeScript" + default: + return string(lang) + } +} + +type loadedTemplates []templateItem + +var defaultTutorials = []templateItem{ + { + ItemTitle: "Intro to Encore.ts", + Desc: "An interactive tutorial", + Template: "ts/introduction", + Lang: "ts", + }, +} + +var defaultTemplates = []templateItem{ 
+ { + ItemTitle: "Hello World", + Desc: "A simple REST API", + Template: "hello-world", + Lang: "go", + }, + { + ItemTitle: "Hello World", + Desc: "A simple REST API", + Template: "ts/hello-world", + Lang: "ts", + }, + { + ItemTitle: "Uptime Monitor", + Desc: "Microservices, SQL Databases, Pub/Sub, Cron Jobs", + Template: "uptime", + Lang: "go", + }, + { + ItemTitle: "Uptime Monitor", + Desc: "Microservices, SQL Databases, Pub/Sub, Cron Jobs", + Template: "ts/uptime", + Lang: "ts", + }, + { + ItemTitle: "GraphQL", + Desc: "GraphQL API, Microservices, SQL Database", + Template: "graphql", + Lang: "go", + }, + { + ItemTitle: "URL Shortener", + Desc: "REST API, SQL Database", + Template: "url-shortener", + Lang: "go", + }, + { + ItemTitle: "URL Shortener", + Desc: "REST API, SQL Database", + Template: "ts/url-shortener", + Lang: "ts", + }, + { + ItemTitle: "SaaS Starter", + Desc: "Complete app with Clerk auth, Stripe billing, etc. (advanced)", + Template: "ts/saas-starter", + Lang: "ts", + }, + { + ItemTitle: "Empty app", + Desc: "Start from scratch (experienced users only)", + Template: "", + Lang: "go", + }, + { + ItemTitle: "Empty app", + Desc: "Start from scratch (experienced users only)", + Template: "ts/empty", + Lang: "ts", + }, +} + +func fetchTemplates(url string, defaults []templateItem) []templateItem { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if req, err := http.NewRequestWithContext(ctx, "GET", url, nil); err == nil { + if resp, err := http.DefaultClient.Do(req); err == nil { + if data, err := io.ReadAll(resp.Body); err == nil { + data, err = hujson.Standardize(data) + if err == nil { + var items []templateItem + if err := json.Unmarshal(data, &items); err == nil && len(items) > 0 { + return items + } + } + } + } + } + return defaults +} + +func loadTemplates() tea.Msg { + var wg sync.WaitGroup + var templates, tutorials []templateItem + wg.Add(1) + go func() { + defer wg.Done() + templates = 
fetchTemplates("https://raw.githubusercontent.com/encoredev/examples/main/cli-templates.json", defaultTemplates) + }() + wg.Add(1) + go func() { + defer wg.Done() + tutorials = fetchTemplates("https://raw.githubusercontent.com/encoredev/examples/main/cli-tutorials.json", defaultTutorials) + }() + wg.Wait() + return loadedTemplates(append(tutorials, templates...)) +} + +// incrementalValidateNameInput is like validateName but only +// checks for valid/invalid characters. It can't check for +// whether the last character is a dash, since if we treat that +// as an error the user won't be able to enter dashes at all. +func incrementalValidateNameInput(name string) error { + ln := len(name) + if ln == 0 { + return fmt.Errorf("name must not be empty") + } else if ln > 50 { + return fmt.Errorf("name too long (max 50 chars)") + } + + for i, s := range name { + // Outside of [a-z], [0-9] and != '-'? + if !((s >= 'a' && s <= 'z') || (s >= '0' && s <= '9') || s == '-') { + return fmt.Errorf("name must only contain lowercase letters, digits, or dashes") + } else if s == '-' { + if i == 0 { + return fmt.Errorf("name cannot start with a dash") + } else if name[i-1] == '-' { + return fmt.Errorf("name cannot contain repeated dashes") + } + } + } + + return nil +} diff --git a/cli/cmd/encore/app/create_test.go b/cli/cmd/encore/app/create_test.go new file mode 100644 index 0000000000..f260070627 --- /dev/null +++ b/cli/cmd/encore/app/create_test.go @@ -0,0 +1,79 @@ +package app + +import ( + "fmt" + "testing" +) + +func Test_setEncoreAppID(t *testing.T) { + tests := []struct { + data []byte + id string + commentLines []string + want string + }{ + { + data: []byte(`{}`), + id: "foo", + commentLines: []string{"bar"}, + want: `{ + // bar + "id": "foo", +} +`, + }, + { + data: []byte(``), + id: "foo", + commentLines: []string{"bar"}, + want: `{ + // bar + "id": "foo", +} +`, + }, + { + data: []byte(`{ + // foo + "id": "test", +}`), + id: "foo", + commentLines: []string{"bar", "baz"}, + 
want: `{ + // bar + // baz + "id": "foo", +} +`, + }, + { + data: []byte(`{ + "some_other_field": true, + // foo + "id": "test", +}`), + id: "foo", + commentLines: []string{"bar", "baz"}, + want: `{ + "some_other_field": true, + // bar + // baz + "id": "foo", +} +`, + }, + } + for i, tt := range tests { + t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { + got, err := setEncoreAppID(tt.data, tt.id, tt.commentLines) + if err != nil { + t.Fatal(err) + } + + gotStr := string(got) + if gotStr != tt.want { + t.Errorf("setEncoreAppID() = %q, want %q", gotStr, tt.want) + } + }) + } +} diff --git a/cli/cmd/encore/app/initialize.go b/cli/cmd/encore/app/initialize.go new file mode 100644 index 0000000000..7d9607ce9d --- /dev/null +++ b/cli/cmd/encore/app/initialize.go @@ -0,0 +1,132 @@ +package app + +import ( + "errors" + "fmt" + "os" + "strings" + "time" + + "github.com/briandowns/spinner" + "github.com/fatih/color" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/internal/conf" + "encr.dev/pkg/xos" +) + +const ( + tsEncoreAppData = `{%s + "id": "%s", + "lang": "typescript", +} +` + goEncoreAppData = `{%s + "id": "%s", +} +` +) + +// Create a new app from scratch: `encore app create` +// Link an existing app to an existing repo: `encore app link ` +// Link an existing repo to a new app: `encore app init ` + +func init() { + initAppCmd := &cobra.Command{ + Use: "init [name]", + Short: "Create a new Encore app from an existing repository", + Args: cobra.MaximumNArgs(1), + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + var name string + if len(args) > 0 { + name = args[0] + } + if err := initializeApp(name); err != nil { + cmdutil.Fatal(err) + } + }, + } + + appCmd.AddCommand(initAppCmd) +} + +func initializeApp(name string) error { + // Check if encore.app file exists + _, _, err := cmdutil.MaybeAppRoot() + if errors.Is(err, cmdutil.ErrNoEncoreApp) { + // expected + } else if err != nil { + cmdutil.Fatal(err) + } 
else if err == nil { + // There is already an app here or in a parent directory. + cmdutil.Fatal("an encore.app file already exists (here or in a parent directory)") + } + + cyan := color.New(color.FgCyan) + promptAccountCreation() + + name, _, lang := selectTemplate(name, "", true) + + if err := validateName(name); err != nil { + return err + } + + appSlug := "" + appSlugComments := "" + // Create the app on the server. + if _, err := conf.CurrentUser(); err == nil { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = "Creating app on encore.dev " + s.Start() + + app, err := createAppOnServer(name, exampleConfig{}) + s.Stop() + if err != nil { + return fmt.Errorf("creating app on encore.dev: %v", err) + } + appSlug = app.Slug + } + + // Create the encore.app file + var encoreAppTemplate = goEncoreAppData + if lang == "ts" { + encoreAppTemplate = tsEncoreAppData + } + if appSlug == "" { + appSlugComments = strings.Join([]string{ + "", + "The app is not currently linked to the encore.dev platform.", + `Use "encore app link" to link it.`, + }, "\n\t//") + } + encoreAppData := []byte(fmt.Sprintf(encoreAppTemplate, appSlugComments, appSlug)) + if err := xos.WriteFile("encore.app", encoreAppData, 0644); err != nil { + return err + } + + // Update to latest encore.dev release if this looks to be a go module. 
+ if _, err := os.Stat("go.mod"); err == nil { + s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) + s.Prefix = "Running go get encore.dev@latest" + s.Start() + if err := gogetEncore("."); err != nil { + s.FinalMSG = fmt.Sprintf("failed, skipping: %v", err.Error()) + } + s.Stop() + } + + green := color.New(color.FgGreen) + _, _ = green.Fprint(os.Stdout, "Successfully initialized application on Encore Cloud!\n") + if appSlug == "" { + _, _ = fmt.Fprintf(os.Stdout, "The app is not currently linked to the encore.dev platform.\n") + _, _ = fmt.Fprintf(os.Stdout, "Use \"encore app link\" to link it.\n") + return nil + } + _, _ = fmt.Fprintf(os.Stdout, "- App ID: %s\n", cyan.Sprint(appSlug)) + _, _ = fmt.Fprintf(os.Stdout, "- Cloud Dashboard: %s\n\n", cyan.Sprintf("https://app.encore.cloud/%s", appSlug)) + + return nil +} diff --git a/cli/cmd/encore/app/link.go b/cli/cmd/encore/app/link.go new file mode 100644 index 0000000000..0573e78d7d --- /dev/null +++ b/cli/cmd/encore/app/link.go @@ -0,0 +1,148 @@ +package app + +import ( + "bytes" + "context" + "errors" + "fmt" + "io/fs" + "os" + "path/filepath" + "time" + + "github.com/spf13/cobra" + "github.com/tailscale/hujson" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/platform" + "encr.dev/internal/conf" + "encr.dev/pkg/xos" +) + +var forceLink bool +var linkAppCmd = &cobra.Command{ + Use: "link [app-id]", + Short: "Link an Encore app with the server", + Args: cobra.MaximumNArgs(1), + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + var appID string + if len(args) > 0 { + appID = args[0] + } + linkApp(appID, forceLink) + }, + ValidArgsFunction: cmdutil.AutoCompleteAppSlug, +} + +func init() { + appCmd.AddCommand(linkAppCmd) + linkAppCmd.Flags().BoolVarP(&forceLink, "force", "f", false, "Force link even if the app is already linked.") +} + +func linkApp(appID string, force bool) { + // Determine the app root. 
+ root, _, err := cmdutil.MaybeAppRoot() + if errors.Is(err, cmdutil.ErrNoEncoreApp) { + root, err = os.Getwd() + } + if err != nil { + cmdutil.Fatal(err) + } + + filePath := filepath.Join(root, "encore.app") + data, err := os.ReadFile(filePath) + if err != nil && !errors.Is(err, fs.ErrNotExist) { + cmdutil.Fatal(err) + os.Exit(1) + } + if len(bytes.TrimSpace(data)) == 0 { + // Treat missing and empty files as an empty object. + data = []byte("{}") + } + + val, err := hujson.Parse(data) + if err != nil { + cmdutil.Fatal("could not parse encore.app: ", err) + } + + appData, ok := val.Value.(*hujson.Object) + if !ok { + cmdutil.Fatal("could not parse encore.app: expected JSON object") + } + + // Find the "id" value, if any. + var idValue *hujson.Value + for i := 0; i < len(appData.Members); i++ { + kv := &appData.Members[i] + lit, ok := kv.Name.Value.(hujson.Literal) + if !ok || lit.String() != "id" { + continue + } + idValue = &kv.Value + } + + if idValue != nil { + val, ok := idValue.Value.(hujson.Literal) + if ok && val.String() != "" && val.String() != appID && !force { + cmdutil.Fatal("the app is already linked.\n\nNote: to link to a different app, specify the --force flag.") + } + } + + if appID == "" { + // The app is not linked. Prompt the user for an app ID. + fmt.Println("Make sure the app is created on app.encore.cloud, and then enter its ID to link it.") + fmt.Print("App ID: ") + if _, err := fmt.Scanln(&appID); err != nil { + cmdutil.Fatal(err) + } else if appID == "" { + cmdutil.Fatal("no app id given.") + } + } + + if linked, err := validateAppSlug(appID); err != nil { + cmdutil.Fatal(err) + } else if !linked { + fmt.Fprintln(os.Stderr, "Error: that app does not exist, or you don't have access to it.") + os.Exit(1) + } + + // Write it back to our data structure. 
+ if idValue != nil { + idValue.Value = hujson.String(appID) + } else { + appData.Members = append(appData.Members, hujson.ObjectMember{ + Name: hujson.Value{Value: hujson.String("id")}, + Value: hujson.Value{Value: hujson.String(appID)}, + }) + } + + val.Format() + if err := xos.WriteFile(filePath, val.Pack(), 0644); err != nil { + cmdutil.Fatal(err) + os.Exit(1) + } + + addEncoreRemote(root, appID) + fmt.Println("Successfully linked app!") +} + +func validateAppSlug(slug string) (ok bool, err error) { + if _, err := conf.CurrentUser(); errors.Is(err, fs.ErrNotExist) { + cmdutil.Fatal("not logged in. Run 'encore auth login' first.") + } else if err != nil { + return false, err + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if _, err := platform.GetApp(ctx, slug); err != nil { + var e platform.Error + if errors.As(err, &e) && e.HTTPCode == 404 { + return false, nil + } + return false, err + } + return true, nil +} diff --git a/cli/cmd/encore/auth.go b/cli/cmd/encore/auth.go deleted file mode 100644 index 7b5484bc8e..0000000000 --- a/cli/cmd/encore/auth.go +++ /dev/null @@ -1,122 +0,0 @@ -package main - -import ( - "errors" - "fmt" - "os" - "runtime" - "time" - - "encr.dev/cli/internal/browser" - "encr.dev/cli/internal/conf" - "encr.dev/cli/internal/login" - "github.com/briandowns/spinner" - "github.com/spf13/cobra" -) - -func init() { - authCmd := &cobra.Command{ - Use: "auth", - Short: "Commands to authenticate with Encore", - } - - signupCmd := &cobra.Command{ - Use: "signup", - Short: "Create a new Encore account", - Run: func(cmd *cobra.Command, args []string) { - doLogin() - }, - } - - loginCmd := &cobra.Command{ - Use: "login", - Short: "Log in to Encore", - Run: func(cmd *cobra.Command, args []string) { - if err := doLogin(); err != nil { - fatal(err) - } - }, - } - - logoutCmd := &cobra.Command{ - Use: "logout", - Short: "Logs out the currently logged in user", - Run: func(cmd *cobra.Command, args 
[]string) { - doLogout() - }, - } - - whoamiCmd := &cobra.Command{ - Use: "whoami", - Short: "Show the current logged in user", - Run: func(cmd *cobra.Command, args []string) { - whoami() - }, - } - - authCmd.AddCommand(signupCmd) - authCmd.AddCommand(loginCmd) - authCmd.AddCommand(logoutCmd) - authCmd.AddCommand(whoamiCmd) - rootCmd.AddCommand(authCmd) -} - -func doLogin() error { - flow, err := login.Begin() - if err != nil { - return err - } - - browser.Open(flow.URL) - - // On Windows we need a proper \r\n newline to ensure the URL detection doesn't extend to the next line. - // fmt.Fprintln and family prints just a simple \n, so don't use that. - fmt.Fprint(os.Stdout, "Log in to Encore using your browser here: ", flow.URL, newline) - - s := spinner.New(spinner.CharSets[14], 100*time.Millisecond) - s.Prefix = "Waiting for login to complete " - s.Start() - defer s.Stop() - - select { - case cfg := <-flow.LoginCh: - if err := conf.Write(cfg); err != nil { - return fmt.Errorf("write credentials: %v", err) - } - fmt.Fprintln(os.Stdout, "Successfully logged in!") - return nil - case <-time.After(10 * time.Minute): - flow.Close() - return fmt.Errorf("timed out") - } -} - -func doLogout() { - if err := conf.Logout(); err != nil { - fmt.Fprintln(os.Stderr, "could not logout:", err) - os.Exit(1) - } - fmt.Fprintln(os.Stdout, "encore: logged out.") -} - -func whoami() { - cfg, err := conf.CurrentUser() - if errors.Is(err, os.ErrNotExist) { - fmt.Fprint(os.Stdout, "not logged in.", newline) - } else if err != nil { - fatal(err) - } else { - fmt.Fprintf(os.Stdout, "logged in as %s%s", cfg.Email, newline) - } -} - -var newline string - -func init() { - switch runtime.GOOS { - case "windows": - newline = "\r\n" - default: - newline = "\n" - } -} diff --git a/cli/cmd/encore/auth/auth.go b/cli/cmd/encore/auth/auth.go new file mode 100644 index 0000000000..f4d06fd38c --- /dev/null +++ b/cli/cmd/encore/auth/auth.go @@ -0,0 +1,150 @@ +package auth + +import ( + "errors" + "fmt" + 
"os" + + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + "encr.dev/cli/internal/login" + "encr.dev/internal/conf" +) + +var authKey string + +func init() { + authCmd := &cobra.Command{ + Use: "auth", + Short: "Commands to authenticate with Encore", + } + + signupCmd := &cobra.Command{ + Use: "signup", + Short: "Create a new Encore account", + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + if err := DoLogin(DeviceAuth); err != nil { + cmdutil.Fatal(err) + } + }, + } + + loginCmd := &cobra.Command{ + Use: "login [--auth-key=]", + Short: "Log in to Encore", + + Run: func(cmd *cobra.Command, args []string) { + if authKey != "" { + if err := DoLoginWithAuthKey(); err != nil { + cmdutil.Fatal(err) + } + } else { + if err := DoLogin(DeviceAuth); err != nil { + cmdutil.Fatal(err) + } + } + }, + } + + logoutCmd := &cobra.Command{ + Use: "logout", + Short: "Logs out the currently logged in user", + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + DoLogout() + }, + } + + whoamiCmd := &cobra.Command{ + Use: "whoami", + Short: "Show the current logged in user", + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + Whoami() + }, + } + + authCmd.AddCommand(signupCmd) + + authCmd.AddCommand(loginCmd) + loginCmd.Flags().StringVarP(&authKey, "auth-key", "k", "", "Auth Key to use for login") + + authCmd.AddCommand(logoutCmd) + authCmd.AddCommand(whoamiCmd) + root.Cmd.AddCommand(authCmd) +} + +type Flow int + +const ( + AutoFlow Flow = iota + Interactive + DeviceAuth +) + +func DoLogin(flow Flow) (err error) { + var fn func() (*conf.Config, error) + switch flow { + case Interactive: + fn = login.Interactive + case DeviceAuth: + fn = login.DeviceAuth + default: + fn = login.DecideFlow + } + cfg, err := fn() + if err != nil { + return err + } + + if err := conf.Write(cfg); err != nil { + return fmt.Errorf("write credentials: %v", err) + } + 
fmt.Fprintln(os.Stdout, "Successfully logged in!") + return nil +} + +func DoLogout() { + if err := conf.Logout(); err != nil { + fmt.Fprintln(os.Stderr, "could not logout:", err) + os.Exit(1) + } + // Stop running daemon to clear any cached credentials + cmdutil.StopDaemon() + fmt.Fprintln(os.Stdout, "encore: logged out.") +} + +func DoLoginWithAuthKey() error { + cfg, err := login.WithAuthKey(authKey) + if err != nil { + return err + } + if err := conf.Write(cfg); err != nil { + return fmt.Errorf("write credentials: %v", err) + } + fmt.Fprintln(os.Stdout, "Successfully logged in!") + return nil +} + +func Whoami() { + cfg, err := conf.CurrentUser() + if err != nil { + if errors.Is(err, os.ErrNotExist) { + fmt.Fprint(os.Stdout, "not logged in.", cmdutil.Newline) + return + } + cmdutil.Fatal(err) + } + + if cfg.AppSlug != "" { + fmt.Fprintf(os.Stdout, "logged in as app %s%s", cfg.AppSlug, cmdutil.Newline) + } else { + fmt.Fprintf(os.Stdout, "logged in as %s%s", cfg.Email, cmdutil.Newline) + } +} diff --git a/cli/cmd/encore/bits/add.go b/cli/cmd/encore/bits/add.go new file mode 100644 index 0000000000..dbad38efbf --- /dev/null +++ b/cli/cmd/encore/bits/add.go @@ -0,0 +1,61 @@ +package bits + +import ( + "context" + "fmt" + "os" + + "github.com/cockroachdb/errors" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/pkg/bits" +) + +var addCmd = &cobra.Command{ + Use: "add []", + Short: "Add an Encore Bit to your application", + Args: cobra.MinimumNArgs(1), + + DisableFlagsInUseLine: true, + Run: func(c *cobra.Command, args []string) { + slug := args[0] + ctx := context.Background() + bit, err := bits.Get(ctx, slug) + if errors.Is(err, errBitNotFound) { + cmdutil.Fatalf("encore bit not found: %s", slug) + } else if err != nil { + cmdutil.Fatalf("could not lookup encore bit: %v", err) + } + + workdir, err := os.MkdirTemp("", "encore-bit") + if err != nil { + cmdutil.Fatal(err) + } + defer os.RemoveAll(workdir) + + //prefix := args[0] + //if 
len(args) > 1 { + // prefix = args[1] + //} + + fmt.Fprintf(os.Stderr, "Downloading Encore Bit: %s\n", bit.Title) + if err := bits.Extract(ctx, bit, workdir); err != nil { + cmdutil.Fatalf("download failed: %v", err) + } + + meta, err := bits.Describe(ctx, workdir) + if err != nil { + cmdutil.Fatalf("could not parse bit metadata: %v", err) + } + + fmt.Fprintf(os.Stderr, "successfully got bit: %+v\n", meta) + + //fmt.Fprintf(os.Stderr, "\n\nSuccessfully added Encore Bit: %s!\n", bit.Title) + //fmt.Fprintf(os.Stderr, "You can find the new bit under the %s/ directory.\n", prefix) + }, +} + +func init() { + bitsCmd.AddCommand(addCmd) +} diff --git a/cli/cmd/encore/bits/api.go b/cli/cmd/encore/bits/api.go new file mode 100644 index 0000000000..977230bf4d --- /dev/null +++ b/cli/cmd/encore/bits/api.go @@ -0,0 +1,64 @@ +package bits + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/url" + + "github.com/cockroachdb/errors" +) + +type Bit struct { + ID int64 + Slug string + Title string + Description string + GitRepo string + GitBranch string +} + +type ListResponse struct { + Bits []*Bit +} + +func List(ctx context.Context) ([]*Bit, error) { + resp, err := http.Get("https://automativity.encore.dev/bits") + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + slurp, _ := io.ReadAll(resp.Body) + return nil, errors.Newf("got status %d: %s", resp.StatusCode, slurp) + } + var data ListResponse + if err := json.NewDecoder(resp.Body).Decode(&data); err != nil { + return nil, errors.Wrap(err, "decode json response") + } + return data.Bits, nil +} + +var errBitNotFound = errors.New("bit not found") + +func Get(ctx context.Context, slug string) (*Bit, error) { + resp, err := http.Get("https://automativity.encore.dev/bits/" + url.PathEscape(slug)) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode == 404 { + return nil, errBitNotFound + } else if resp.StatusCode != 200 { + slurp, _ 
:= io.ReadAll(resp.Body) + return nil, errors.Newf("got status %d: %s", resp.StatusCode, slurp) + } + var bit Bit + if err := json.NewDecoder(resp.Body).Decode(&bit); err != nil { + return nil, errors.Wrap(err, "decode json response") + } + return &bit, nil +} diff --git a/cli/cmd/encore/bits/bits.go b/cli/cmd/encore/bits/bits.go new file mode 100644 index 0000000000..50995c351a --- /dev/null +++ b/cli/cmd/encore/bits/bits.go @@ -0,0 +1,16 @@ +package bits + +import ( + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/root" +) + +var bitsCmd = &cobra.Command{ + Use: "bits", + Short: "Commands to manage encore bits, reusable functionality for Encore applications", +} + +func init() { + root.Cmd.AddCommand(bitsCmd) +} diff --git a/cli/cmd/encore/bits/list.go b/cli/cmd/encore/bits/list.go new file mode 100644 index 0000000000..c5877fe13e --- /dev/null +++ b/cli/cmd/encore/bits/list.go @@ -0,0 +1,37 @@ +package bits + +import ( + "context" + "fmt" + "os" + "text/tabwriter" + + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/pkg/bits" +) + +var listCmd = &cobra.Command{ + Use: "list", + Short: "Lists available Encore Bits to add to your application", + Args: cobra.ExactArgs(0), + Run: func(c *cobra.Command, args []string) { + bits, err := bits.List(context.Background()) + if err != nil { + cmdutil.Fatalf("could not list encore bits: %v", err) + } + + tw := tabwriter.NewWriter(os.Stdout, 0, 8, 0, '\t', 0) + fmt.Fprintln(tw, "ID\tTitle\tDescription") + for _, bit := range bits { + fmt.Fprintf(tw, "%s\t%s\t%s\n", bit.Slug, bit.Title, bit.Description) + fmt.Fprintln(tw) + } + tw.Flush() + }, +} + +func init() { + bitsCmd.AddCommand(listCmd) +} diff --git a/cli/cmd/encore/build.go b/cli/cmd/encore/build.go new file mode 100644 index 0000000000..954ee4a14e --- /dev/null +++ b/cli/cmd/encore/build.go @@ -0,0 +1,157 @@ +package main + +import ( + "context" + "fmt" + "os" + "os/signal" + "path/filepath" + + "github.com/spf13/cobra" + + 
"encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/pkg/appfile" + daemonpb "encr.dev/proto/encore/daemon" +) + +var ( + targetOS = cmdutil.Oneof{ + Value: "linux", + Allowed: []string{"linux"}, + Flag: "os", + Desc: "the target operating system", + } + targetArch = cmdutil.Oneof{ + Value: "amd64", + Allowed: []string{"amd64", "arm64"}, + Flag: "arch", + Desc: "the target architecture", + } +) + +func init() { + buildCmd := &cobra.Command{ + Use: "build", + Aliases: []string{"eject"}, + Short: "build provides ways to build your application for deployment", + } + + p := buildParams{ + CgoEnabled: os.Getenv("CGO_ENABLED") == "1", + } + dockerBuildCmd := &cobra.Command{ + Use: "docker IMAGE_TAG", + Short: "docker builds a portable docker image of your Encore application", + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + p.Goarch = targetArch.Value + p.Goos = targetOS.Value + p.AppRoot, _ = determineAppRoot() + p.WorkspaceRoot = determineWorkspaceRoot(p.AppRoot) + file, err := appfile.ParseFile(filepath.Join(p.AppRoot, appfile.Name)) + if err == nil { + if !cmd.Flag("base").Changed && file.Lang == appfile.LangTS { + p.BaseImg = "node:slim" + } + if !cmd.Flag("cgo").Changed { + p.CgoEnabled = file.Build.CgoEnabled + } + } + p.ImageTag = args[0] + dockerBuild(p) + }, + } + + dockerBuildCmd.Flags().BoolVarP(&p.Push, "push", "p", false, "push image to remote repository") + dockerBuildCmd.Flags().StringVar(&p.BaseImg, "base", "scratch", "base image to build from") + dockerBuildCmd.Flags().BoolVar(&p.CgoEnabled, "cgo", false, "enable cgo") + dockerBuildCmd.Flags().BoolVar(&p.SkipInfraConf, "skip-config", false, "do not read or generate a infra configuration file") + dockerBuildCmd.Flags().StringVar(&p.InfraConfPath, "config", "", "infra configuration file path") + p.Services = dockerBuildCmd.Flags().StringSlice("services", nil, "services to include in the image") + p.Gateways = dockerBuildCmd.Flags().StringSlice("gateways", nil, "gateways to include 
in the image") + targetOS.AddFlag(dockerBuildCmd) + targetArch.AddFlag(dockerBuildCmd) + rootCmd.AddCommand(buildCmd) + buildCmd.AddCommand(dockerBuildCmd) +} + +type buildParams struct { + AppRoot string + WorkspaceRoot string + ImageTag string + Push bool + BaseImg string + Goos string + Goarch string + CgoEnabled bool + SkipInfraConf bool + InfraConfPath string + Services *[]string + Gateways *[]string +} + +func dockerBuild(p buildParams) { + interrupt := make(chan os.Signal, 1) + signal.Notify(interrupt, os.Interrupt) + + ctx, cancel := context.WithCancel(context.Background()) + go func() { + <-interrupt + cancel() + }() + + daemon := setupDaemon(ctx) + params := &daemonpb.DockerExportParams{ + BaseImageTag: p.BaseImg, + } + if p.Push { + params.PushDestinationTag = p.ImageTag + } else { + params.LocalDaemonTag = p.ImageTag + } + + var services, gateways []string + if p.Services != nil { + services = *p.Services + } + if p.Gateways != nil { + gateways = *p.Gateways + } + var err error + cfgPath := "" + if p.InfraConfPath != "" { + cfgPath, err = filepath.Abs(p.InfraConfPath) + if err != nil { + cmdutil.Fatalf("failed to resolve absolute path for %s: %v", p.InfraConfPath, err) + } + } + stream, err := daemon.Export(ctx, &daemonpb.ExportRequest{ + AppRoot: p.AppRoot, + WorkspaceRoot: p.WorkspaceRoot, + CgoEnabled: p.CgoEnabled, + Goos: p.Goos, + Goarch: p.Goarch, + Environ: os.Environ(), + Format: &daemonpb.ExportRequest_Docker{ + Docker: params, + }, + InfraConfPath: cfgPath, + Services: services, + Gateways: gateways, + SkipInfraConf: p.SkipInfraConf, + }) + if err != nil { + fmt.Fprintln(os.Stderr, "fatal: ", err) + os.Exit(1) + } + if code := cmdutil.StreamCommandOutput(stream, cmdutil.ConvertJSONLogs()); code != 0 { + os.Exit(code) + } +} + +func or(a, b string) string { + if a != "" { + return a + } + return b +} diff --git a/cli/cmd/encore/check.go b/cli/cmd/encore/check.go index a813e4b064..5c31ad2844 100644 --- a/cli/cmd/encore/check.go +++ 
b/cli/cmd/encore/check.go @@ -6,13 +6,22 @@ import ( "os" "os/signal" - daemonpb "encr.dev/proto/encore/daemon" "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + daemonpb "encr.dev/proto/encore/daemon" +) + +var ( + codegenDebug bool + checkParseTests bool ) var checkCmd = &cobra.Command{ Use: "check", - Short: "Checks your application for errors", + Short: "Checks your application for compile-time errors using Encore's compiler.", + + DisableFlagsInUseLine: true, Run: func(cmd *cobra.Command, args []string) { appRoot, relPath := determineAppRoot() runChecks(appRoot, relPath) @@ -21,6 +30,8 @@ var checkCmd = &cobra.Command{ func init() { rootCmd.AddCommand(checkCmd) + checkCmd.Flags().BoolVar(&codegenDebug, "codegen-debug", false, "Dump generated code (for debugging Encore's code generation)") + checkCmd.Flags().BoolVar(&checkParseTests, "tests", false, "Parse tests as well") } func runChecks(appRoot, relPath string) { @@ -34,10 +45,16 @@ func runChecks(appRoot, relPath string) { }() daemon := setupDaemon(ctx) - stream, err := daemon.Check(ctx, &daemonpb.CheckRequest{AppRoot: appRoot, WorkingDir: relPath}) + stream, err := daemon.Check(ctx, &daemonpb.CheckRequest{ + AppRoot: appRoot, + WorkingDir: relPath, + CodegenDebug: codegenDebug, + ParseTests: checkParseTests, + Environ: os.Environ(), + }) if err != nil { fmt.Fprintln(os.Stderr, "fatal: ", err) os.Exit(1) } - streamCommandOutput(stream) + os.Exit(cmdutil.StreamCommandOutput(stream, nil)) } diff --git a/cli/cmd/encore/cmdutil/autocompletes.go b/cli/cmd/encore/cmdutil/autocompletes.go new file mode 100644 index 0000000000..d484c75848 --- /dev/null +++ b/cli/cmd/encore/cmdutil/autocompletes.go @@ -0,0 +1,87 @@ +package cmdutil + +import ( + "fmt" + "strings" + + "github.com/spf13/cobra" + + "encr.dev/cli/internal/platform" + "encr.dev/internal/conf" +) + +func AutoCompleteFromStaticList(args ...string) func(cmd *cobra.Command, _ []string, toComplete string) ([]string, cobra.ShellCompDirective) { + 
return func(cmd *cobra.Command, _ []string, toComplete string) (rtn []string, dir cobra.ShellCompDirective) { + toComplete = strings.ToLower(toComplete) + + for _, option := range args { + before, _, _ := strings.Cut(option, "\t") + + if strings.HasPrefix(before, toComplete) { + rtn = append(rtn, option) + } + } + + return rtn, cobra.ShellCompDirectiveNoFileComp + } +} + +func AutoCompleteAppSlug(cmd *cobra.Command, _ []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // incase of not being logged in or an error, we give no auto competition + _, err := conf.CurrentUser() + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + apps, err := platform.ListApps(cmd.Context()) + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + toComplete = strings.ToLower(toComplete) + + rtn := make([]string, 0, len(apps)) + for _, app := range apps { + if strings.HasPrefix(strings.ToLower(app.Slug), toComplete) { + desc := app.Description + if desc == "" { + desc = app.Name + } + + rtn = append(rtn, fmt.Sprintf("%s\t%s", app.Slug, desc)) + } + } + + return rtn, cobra.ShellCompDirectiveNoFileComp +} + +func AutoCompleteEnvSlug(cmd *cobra.Command, args []string, toComplete string) (rtn []string, dir cobra.ShellCompDirective) { + toComplete = strings.ToLower(toComplete) + + // Support the local environment + if strings.HasPrefix("local", toComplete) { + rtn = append(rtn, "local\tThis local development environment") + } + + _, err := conf.CurrentUser() + if err != nil { + return rtn, cobra.ShellCompDirectiveError + } + + // Assume the app slug is the first argument + appSlug := args[len(args)-1] + + // Get the environments for the app and filter by what the user has already entered + envs, err := platform.ListEnvs(cmd.Context(), appSlug) + if err != nil { + return rtn, cobra.ShellCompDirectiveError + } + + for _, env := range envs { + if strings.HasPrefix(strings.ToLower(env.Slug), toComplete) { + rtn = append(rtn, fmt.Sprintf("%s\tA %s 
enviroment running on %s", env.Slug, env.Type, env.Cloud)) + } + } + + return rtn, cobra.ShellCompDirectiveNoFileComp +} diff --git a/cli/cmd/encore/cmdutil/cmdutil.go b/cli/cmd/encore/cmdutil/cmdutil.go new file mode 100644 index 0000000000..c1c8883b25 --- /dev/null +++ b/cli/cmd/encore/cmdutil/cmdutil.go @@ -0,0 +1,187 @@ +package cmdutil + +import ( + "encoding/json" + "errors" + "fmt" + "io/fs" + "os" + "path/filepath" + "runtime" + + "github.com/fatih/color" + "golang.org/x/crypto/ssh/terminal" + "google.golang.org/grpc/status" + + "encr.dev/cli/internal/manifest" + "encr.dev/pkg/appfile" + "encr.dev/pkg/errinsrc" + "encr.dev/pkg/errlist" +) + +var ( + ErrNoEncoreApp = errors.New("no encore.app found in directory (or any of the parent directories)") + ErrEncoreAppIsDir = errors.New("encore.app is a directory, not a file") +) + +// MaybeAppRoot determines the app root by looking for the "encore.app" file, +// initially in the current directory and then recursively in parent directories +// up to the filesystem root. +// +// It reports the absolute path to the app root, and the +// relative path from the app root to the working directory. +func MaybeAppRoot() (appRoot, relPath string, err error) { + dir, err := os.Getwd() + if err != nil { + return "", "", err + } + return FindAppRootFromDir(dir) +} + +func FindAppRootFromDir(dir string) (appRoot, relPath string, err error) { + rel := "." + for { + path := filepath.Join(dir, "encore.app") + fi, err := os.Stat(path) + if errors.Is(err, fs.ErrNotExist) { + dir2 := filepath.Dir(dir) + if dir2 == dir { + return "", "", ErrNoEncoreApp + } + rel = filepath.Join(filepath.Base(dir), rel) + dir = dir2 + continue + } else if err != nil { + return "", "", err + } else if fi.IsDir() { + return "", "", ErrEncoreAppIsDir + } else { + return dir, rel, nil + } + } +} + +// AppRoot is like MaybeAppRoot but instead of returning an error +// it prints it to stderr and exits. 
+func AppRoot() (appRoot, relPath string) { + appRoot, relPath, err := MaybeAppRoot() + if err != nil { + Fatal(err) + } + return appRoot, relPath +} + +// WorkspaceRoot determines the workspace root by looking for the .git folder in app root or parents to it. +// It reports the absolute path to the workspace root. +func WorkspaceRoot(appRoot string) string { + dir := appRoot + for { + path := filepath.Join(dir, ".git") + fi, err := os.Stat(path) + if errors.Is(err, fs.ErrNotExist) { + dir2 := filepath.Dir(dir) + if dir2 == dir { + return appRoot + } + dir = dir2 + continue + } else if err != nil { + Fatal(err) + } else if !fi.IsDir() { + continue + } else { + return dir + } + } +} + +func AppSlugOrLocalID() string { + appRoot, _ := AppRoot() + appID, _ := appfile.Slug(appRoot) + if appID == "" { + mf, err := manifest.ReadOrCreate(appRoot) + if err != nil { + Fatalf("failed to read app manifest: %v", err) + } + appID = mf.LocalID + } + return appID +} + +// AppSlug reports the current app's app slug. +// It throws a fatal error if the app is not connected with the Encore Platform. +func AppSlug() string { + appRoot, _ := AppRoot() + appSlug, err := appfile.Slug(appRoot) + if err != nil { + Fatal(err) + } else if appSlug == "" { + Fatal("app is not linked with the Encore Platform (see 'encore app link')") + } + return appSlug +} + +func Fatal(args ...any) { + // Prettify gRPC errors + for i, arg := range args { + if err, ok := arg.(error); ok { + if s, ok := status.FromError(err); ok { + args[i] = s.Message() + } + } + } + + red := color.New(color.FgRed) + _, _ = red.Fprint(os.Stderr, "error: ") + _, _ = red.Fprintln(os.Stderr, args...) 
+ os.Exit(1) +} + +func Fatalf(format string, args ...any) { + // Prettify gRPC errors + for i, arg := range args { + if err, ok := arg.(error); ok { + if s, ok := status.FromError(err); ok { + args[i] = s.Message() + } + } + } + + Fatal(fmt.Sprintf(format, args...)) +} + +func DisplayError(out *os.File, err []byte) { + if len(err) == 0 { + return + } + + // Get the width of the terminal we're rendering in + // if we can so we render using the most space possible. + width, _, sizeErr := terminal.GetSize(int(out.Fd())) + if sizeErr == nil { + errinsrc.TerminalWidth = width + } + + // Unmarshal the error into a structured errlist + errList := errlist.New(nil) + if err := json.Unmarshal(err, &errList); err != nil { + Fatalf("unable to parse error: %v", err) + } + + if errList.Len() == 0 { + return + } + + _, _ = os.Stderr.Write([]byte(errList.Error())) +} + +var Newline string + +func init() { + switch runtime.GOOS { + case "windows": + Newline = "\r\n" + default: + Newline = "\n" + } +} diff --git a/cli/cmd/encore/cmdutil/daemon.go b/cli/cmd/encore/cmdutil/daemon.go new file mode 100644 index 0000000000..728532cd96 --- /dev/null +++ b/cli/cmd/encore/cmdutil/daemon.go @@ -0,0 +1,188 @@ +package cmdutil + +import ( + "context" + "fmt" + "net" + "os" + "os/exec" + "path/filepath" + "time" + + "github.com/golang/protobuf/ptypes/empty" + "google.golang.org/genproto/googleapis/rpc/errdetails" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "encr.dev/internal/version" + "encr.dev/pkg/xos" + daemonpb "encr.dev/proto/encore/daemon" +) + +func IsDaemonRunning(ctx context.Context) bool { + socketPath, err := daemonSockPath() + if err != nil { + return false + } + if _, err := xos.SocketStat(socketPath); err == nil { + // The socket exists; check that it is responsive. 
+ if cc, err := dialDaemon(ctx, socketPath); err == nil { + _ = cc.Close() + return true + } + // socket is not responding, remove it + _ = os.Remove(socketPath) + } + return false + +} + +// ConnectDaemon returns a client connection to the Encore daemon. +// By default, it will start the daemon if it is not already running. +func ConnectDaemon(ctx context.Context) daemonpb.DaemonClient { + socketPath, err := daemonSockPath() + if err != nil { + fmt.Fprintln(os.Stderr, "fatal: ", err) + os.Exit(1) + } + + if _, err := xos.SocketStat(socketPath); err == nil { + // The socket exists; check that it is responsive. + if cc, err := dialDaemon(ctx, socketPath); err == nil { + // Make sure the daemon is running an up-to-date version; + // restart it otherwise. + cl := daemonpb.NewDaemonClient(cc) + if resp, err := cl.Version(ctx, &empty.Empty{}); err == nil { + diff := version.Compare(resp.Version) + switch { + case diff < 0: + // Daemon is running a newer version + return cl + case diff == 0: + if configHash, err := version.ConfigHash(); err != nil { + Fatal("unable to get config path: ", err) + } else if configHash == resp.ConfigHash { + return cl + } + + // If we're running a development release, and so is the daemon, don't restart. + // This is to avoid spurious restarts during development. + if version.Channel == version.DevBuild && version.ChannelFor(resp.Version) == version.DevBuild { + return cl + } + + // Daemon is running the same version but different config + fmt.Fprintf(os.Stderr, "encore: restarting daemon due to configuration change.\n") + case diff > 0: + fmt.Fprintf(os.Stderr, "encore: daemon is running an outdated version (%s), restarting.\n", resp.Version) + } + } + } + // Remove the socket file which triggers the daemon to exit. + _ = os.Remove(socketPath) + } + + // Start the daemon. 
+ if err := StartDaemonInBackground(ctx); err != nil { + Fatal("starting daemon: ", err) + } + cc, err := dialDaemon(ctx, socketPath) + if err != nil { + Fatal("dialing daemon: ", err) + } + return daemonpb.NewDaemonClient(cc) +} + +func StopDaemon() { + socketPath, err := daemonSockPath() + if err != nil { + Fatal("stopping daemon: ", err) + } + if _, err := xos.SocketStat(socketPath); err == nil { + _ = os.Remove(socketPath) + } +} + +// daemonSockPath reports the path to the Encore daemon unix socket. +func daemonSockPath() (string, error) { + cacheDir, err := os.UserCacheDir() + if err != nil { + return "", fmt.Errorf("could not determine cache dir: %v", err) + } + return filepath.Join(cacheDir, "encore", "encored.sock"), nil +} + +// StartDaemonInBackground starts the Encore daemon in the background. +func StartDaemonInBackground(ctx context.Context) error { + socketPath, err := daemonSockPath() + if err != nil { + return err + } + + // nosemgrep + exe, err := os.Executable() + if err != nil { + exe, err = exec.LookPath("encore") + } + if err != nil { + return fmt.Errorf("could not determine location of encore executable: %v", err) + } + // nosemgrep + cmd := exec.Command(exe, "daemon", "-f") + cmd.SysProcAttr = xos.CreateNewProcessGroup() + if err := cmd.Start(); err != nil { + return fmt.Errorf("could not start encore daemon: %v", err) + } + + // Wait for it to come up + for i := 0; i < 50; i++ { + if err := ctx.Err(); err != nil { + return err + } + time.Sleep(100 * time.Millisecond) + if _, err := xos.SocketStat(socketPath); err == nil { + return nil + } + } + return fmt.Errorf("timed out waiting for daemon to start") +} + +func dialDaemon(ctx context.Context, socketPath string) (*grpc.ClientConn, error) { + ctx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) + defer cancel() + + dialer := func(ctx context.Context, addr string) (net.Conn, error) { + return (&net.Dialer{}).DialContext(ctx, "unix", socketPath) + } + // Set max message size to 16mb 
(up from default 4mb) for json formatted debug metadata for large applications. + return grpc.DialContext(ctx, "", + grpc.WithInsecure(), + grpc.WithBlock(), + grpc.WithUnaryInterceptor(errInterceptor), + grpc.WithContextDialer(dialer), + grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(16*1024*1024)), + ) +} + +func errInterceptor(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + err := invoker(ctx, method, req, reply, cc, opts...) + if err != nil { + if st, ok := status.FromError(err); ok { + if st.Code() == codes.Unauthenticated { + Fatal("not logged in: run 'encore auth login' first") + } + for _, detail := range st.Details() { + switch t := detail.(type) { + case *errdetails.PreconditionFailure: + for _, violation := range t.Violations { + if violation.Type == "INVALID_REFRESH_TOKEN" { + Fatal("OAuth refresh token was invalid. Please run `encore auth login` again.") + } + } + } + } + } + } + return err +} diff --git a/cli/cmd/encore/cmdutil/output.go b/cli/cmd/encore/cmdutil/output.go new file mode 100644 index 0000000000..be78f7802e --- /dev/null +++ b/cli/cmd/encore/cmdutil/output.go @@ -0,0 +1,111 @@ +package cmdutil + +import ( + "errors" + "slices" + "strconv" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +type Oneof struct { + Value string + Allowed []string + Flag string // defaults to "output" if empty + FlagShort string // defaults to "o" if both Flag and FlagShort are empty + Desc string // usage desc + TypeDesc string // type description, defaults to the name of the flag + NoOptDefVal string // default value when no option is provided +} + +func (o *Oneof) AddFlag(cmd *cobra.Command) { + name, short := o.FlagName() + cmd.Flags().AddFlag( + &pflag.Flag{ + Name: name, + NoOptDefVal: o.NoOptDefVal, + Shorthand: short, + Usage: o.Usage(), + Value: o, + DefValue: o.String(), + }) + _ = cmd.RegisterFlagCompletionFunc(name, func(cmd 
*cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return o.Allowed, cobra.ShellCompDirectiveNoFileComp + }) +} + +func (o *Oneof) FlagName() (name, short string) { + name, short = o.Flag, o.FlagShort + if name == "" { + name, short = "output", "o" + } + return name, short +} + +func (o *Oneof) String() string { + return o.Value +} + +func (o *Oneof) Type() string { + if o.TypeDesc != "" { + return o.TypeDesc + } + name, _ := o.FlagName() + return name +} + +func (o *Oneof) Set(v string) error { + if slices.Contains(o.Allowed, v) { + o.Value = v + return nil + } + + var b strings.Builder + b.WriteString("must be one of ") + o.oneOf(&b) + return errors.New(b.String()) +} + +func (o *Oneof) Usage() string { + var b strings.Builder + desc := o.Desc + if desc == "" { + desc = "Output format" + } + b.WriteString(desc + ". One of (") + o.oneOf(&b) + b.WriteString(").") + return b.String() +} + +// Alternatives lists the alternatives in the format "a|b|c". 
+func (o *Oneof) Alternatives() string { + var b strings.Builder + for i, s := range o.Allowed { + if i > 0 { + b.WriteByte('|') + } + b.WriteString(s) + } + return b.String() +} + +func (o *Oneof) oneOf(b *strings.Builder) { + n := len(o.Allowed) + for i, s := range o.Allowed { + if i > 0 { + switch { + case n == 2: + b.WriteString(" or ") + case i == n-1: + b.WriteString(", or ") + default: + b.WriteString(", ") + } + } + + b.WriteString(strconv.Quote(s)) + } +} diff --git a/cli/cmd/encore/cmdutil/stream.go b/cli/cmd/encore/cmdutil/stream.go new file mode 100644 index 0000000000..e62790cf83 --- /dev/null +++ b/cli/cmd/encore/cmdutil/stream.go @@ -0,0 +1,203 @@ +package cmdutil + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + "io" + "os" + "strings" + "sync" + + "github.com/logrusorgru/aurora/v3" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "golang.org/x/crypto/ssh/terminal" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "encr.dev/pkg/ansi" + "encr.dev/proto/encore/daemon" +) + +// CommandOutputStream is the interface for gRPC streams that +// stream the output of a command. +type CommandOutputStream interface { + Recv() (*daemon.CommandMessage, error) +} + +type OutputConverter func(line []byte) []byte + +// StreamCommandOutput streams the output from the given command stream, +// and reports the command's exit code. +// If convertJSON is true, lines that look like JSON are fed through +// zerolog's console writer. +func StreamCommandOutput(stream CommandOutputStream, converter OutputConverter) int { + var outWrite io.Writer = os.Stdout + var errWrite io.Writer = os.Stderr + + var writesDone sync.WaitGroup + defer writesDone.Wait() + + if converter != nil { + // Create a pipe that we read from line-by-line so we can detect JSON lines. 
+ outRead, outw := io.Pipe() + errRead, errw := io.Pipe() + outWrite = outw + errWrite = errw + defer func() { _ = outw.Close() }() + defer func() { _ = errw.Close() }() + + for i, read := range []io.Reader{outRead, errRead} { + read := read + stdout := i == 0 + writesDone.Add(1) + go func() { + defer writesDone.Done() + + for { + scanner := bufio.NewScanner(read) + for scanner.Scan() { + line := append(scanner.Bytes(), '\n') + line = converter(line) + if stdout { + _, _ = os.Stdout.Write(line) + } else { + _, _ = os.Stderr.Write(line) + } + } + if err := scanner.Err(); err != nil { + // The scanner failed, likely due to a too-long line. Log an error + // and create a new scanner since the old one is in an unrecoverable state. + fmt.Fprintln(os.Stderr, "failed to read output:", err) + scanner = bufio.NewScanner(read) + continue + } else { + break + } + } + }() + } + } + + for { + msg, err := stream.Recv() + if err != nil { + st := status.Convert(err) + switch { + case st.Code() == codes.FailedPrecondition: + _, _ = fmt.Fprintln(os.Stderr, st.Message()) + return 1 + case err == io.EOF || st.Code() == codes.Canceled || strings.HasSuffix(err.Error(), "error reading from server: EOF"): + return 0 + default: + log.Fatal().Err(err).Msg("connection failure") + } + } + + switch m := msg.Msg.(type) { + case *daemon.CommandMessage_Output: + if m.Output.Stdout != nil { + _, _ = outWrite.Write(m.Output.Stdout) + } + if m.Output.Stderr != nil { + _, _ = errWrite.Write(m.Output.Stderr) + } + case *daemon.CommandMessage_Errors: + DisplayError(os.Stderr, m.Errors.Errinsrc) + + case *daemon.CommandMessage_Exit: + return int(m.Exit.Code) + } + } +} + +type ConvertLogOptions struct { + Color bool +} + +type ConvertLogOption func(*ConvertLogOptions) + +func Colorize(enable bool) ConvertLogOption { + return func(clo *ConvertLogOptions) { + clo.Color = enable + } +} + +func ConvertJSONLogs(opts ...ConvertLogOption) OutputConverter { + // Default to colorized output. 
+ options := ConvertLogOptions{Color: true} + + for _, opt := range opts { + opt(&options) + } + + var logMutex sync.Mutex + logLineBuffer := bytes.NewBuffer(make([]byte, 0, 1024)) + cout := zerolog.NewConsoleWriter(func(w *zerolog.ConsoleWriter) { + w.Out = logLineBuffer + w.FieldsExclude = []string{"stack"} + w.FormatExtra = func(vals map[string]any, buf *bytes.Buffer) error { + if stack, ok := vals["stack"]; ok { + return FormatStack(stack, buf) + } + return nil + } + }) + if !options.Color { + cout.NoColor = true + } + + return func(line []byte) []byte { + // If this isn't a JSON log line, just return it as-is + if len(line) == 0 || line[0] != '{' { + return line + } + + // Otherwise grab the converter buffer and reset it + logMutex.Lock() + defer logMutex.Unlock() + logLineBuffer.Reset() + + // Then convert the JSON log line to pretty formatted text + _, err := cout.Write(line) + if err != nil { + return line + } + out := make([]byte, len(logLineBuffer.Bytes())) + copy(out, logLineBuffer.Bytes()) + return out + } +} + +func FormatStack(val any, buf *bytes.Buffer) error { + var frames []struct { + File string + Line int + Func string + } + + if jsonRepr, err := json.Marshal(val); err != nil { + return err + } else if err := json.Unmarshal(jsonRepr, &frames); err != nil { + return err + } + for _, f := range frames { + fmt.Fprintf(buf, "\n %s\n %s", + f.Func, + aurora.Gray(12, fmt.Sprintf("%s:%d", f.File, f.Line))) + } + return nil +} + +func ClearTerminalExceptFirstNLines(n int) { + // Clear the screen except for the first line. 
+	if _, height, err := terminal.GetSize(int(os.Stdout.Fd())); err == nil {
+		count := height - (1 + n)
+		if count > 0 {
+			_, _ = os.Stdout.Write(bytes.Repeat([]byte{'\n'}, count))
+		}
+		_, _ = fmt.Fprint(os.Stdout, ansi.SetCursorPosition(2, 1)+ansi.ClearScreen(ansi.CursorToBottom))
+	}
+}
diff --git a/cli/cmd/encore/config/config.go b/cli/cmd/encore/config/config.go
new file mode 100644
index 0000000000..0723ba80fd
--- /dev/null
+++ b/cli/cmd/encore/config/config.go
@@ -0,0 +1,127 @@
+package config
+
+import (
+	"fmt"
+	"os"
+	"strings"
+
+	"encr.dev/cli/cmd/encore/cmdutil"
+	"encr.dev/cli/cmd/encore/root"
+	"encr.dev/internal/userconfig"
+	"github.com/spf13/cobra"
+)
+
+var (
+	forceApp, forceGlobal bool
+	viewAllSettings       bool
+)
+
+var autoCompleteConfigKeys = cmdutil.AutoCompleteFromStaticList(userconfig.Keys()...)
+
+var longDocs = `Gets or sets configuration values for customizing the behavior of the Encore CLI.
+
+Configuration options can be set both for individual Encore applications,
+as well as globally for the local user.
+
+Configuration options can be set using ` + bt("encore config <key> <value>") + `,
+and options can similarly be read using ` + bt("encore config <key>") + `.
+
+When running ` + bt("encore config") + ` within an Encore application,
+it automatically sets and gets configuration for that application.
+
+To set or get global configuration, use the ` + bt("--global") + ` flag.
+
+Available configuration settings are:
+
+` + userconfig.CLIDocs()
+
+var configCmd = &cobra.Command{
+	Use:   "config <key> [<value>]",
+	Short: "Get or set a configuration value",
+	Long:  longDocs,
+	Args:  cobra.RangeArgs(0, 2),
+
+	Run: func(cmd *cobra.Command, args []string) {
+		appRoot, _, _ := cmdutil.MaybeAppRoot()
+
+		appScope := appRoot != ""
+		if forceApp {
+			appScope = true
+		} else if forceGlobal {
+			appScope = false
+		}
+
+		if appScope && appRoot == "" {
+			// If the user specified --app, error if there is no app.
+ cmdutil.Fatal(cmdutil.ErrNoEncoreApp) + } + + if len(args) == 2 { + var err error + if appScope { + err = userconfig.SetForApp(appRoot, args[0], args[1]) + } else { + err = userconfig.SetGlobal(args[0], args[1]) + } + if err != nil { + cmdutil.Fatal(err) + } + } else { + var ( + cfg *userconfig.Config + err error + ) + if appScope { + appRoot, _ := cmdutil.AppRoot() + cfg, err = userconfig.ForApp(appRoot).Get() + } else { + cfg, err = userconfig.Global().Get() + } + if err != nil { + cmdutil.Fatal(err) + } + + if viewAllSettings { + if len(args) > 0 { + cmdutil.Fatalf("cannot specify a settings key when using --all") + } + s := strings.TrimSuffix(cfg.Render(), "\n") + fmt.Println(s) + return + } + + if len(args) == 0 { + // No args are only allowed when --all is specified. + _ = cmd.Usage() + os.Exit(1) + } + + val, ok := cfg.GetByKey(args[0]) + if !ok { + cmdutil.Fatalf("unknown key %q", args[0]) + } + fmt.Printf("%v\n", val) + } + }, + ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + if len(args) == 0 { + // Completing the first argument, the config key + return autoCompleteConfigKeys(cmd, args, toComplete) + } + return nil, cobra.ShellCompDirectiveNoFileComp + }, +} + +func init() { + configCmd.Flags().BoolVar(&viewAllSettings, "all", false, "view all settings") + configCmd.Flags().BoolVar(&forceApp, "app", false, "set the value for the current app") + configCmd.Flags().BoolVar(&forceGlobal, "global", false, "set the value at the global level") + configCmd.MarkFlagsMutuallyExclusive("app", "global") + + root.Cmd.AddCommand(configCmd) +} + +// bt renders a backtick-enclosed string. 
+func bt(val string) string { + return fmt.Sprintf("`%s`", val) +} diff --git a/cli/cmd/encore/daemon.go b/cli/cmd/encore/daemon.go index f8897f2857..c2d11933cf 100644 --- a/cli/cmd/encore/daemon.go +++ b/cli/cmd/encore/daemon.go @@ -3,19 +3,14 @@ package main import ( "context" "fmt" - "net" "os" - "os/exec" - "path/filepath" - "time" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" daemonpkg "encr.dev/cli/cmd/encore/daemon" - "encr.dev/cli/internal/xos" + "encr.dev/internal/env" daemonpb "encr.dev/proto/encore/daemon" - "github.com/golang/protobuf/ptypes/empty" - "github.com/spf13/cobra" - "golang.org/x/mod/semver" - "google.golang.org/grpc" ) var daemonizeForeground bool @@ -25,9 +20,9 @@ var daemonCmd = &cobra.Command{ Short: "Starts the encore daemon", Run: func(cc *cobra.Command, args []string) { if daemonizeForeground { - daemonpkg.Main(Version) + daemonpkg.Main() } else { - if err := daemonize(context.Background()); err != nil { + if err := cmdutil.StartDaemonInBackground(context.Background()); err != nil { fatal(err) } fmt.Fprintln(os.Stdout, "encore daemon is now running") @@ -38,96 +33,20 @@ var daemonCmd = &cobra.Command{ func init() { rootCmd.AddCommand(daemonCmd) daemonCmd.Flags().BoolVarP(&daemonizeForeground, "foreground", "f", false, "Start the daemon in the foreground") + daemonCmd.AddCommand(daemonEnvCmd) } -// daemonize starts the Encore daemon in the background. 
-func daemonize(ctx context.Context) error { - socketPath, err := daemonSockPath() - if err != nil { - return err - } - - exe, err := os.Executable() - if err != nil { - exe, err = exec.LookPath("encore") - } - if err != nil { - return fmt.Errorf("could not determine location of encore executable: %v", err) - } - cmd := exec.Command(exe, "daemon", "-f") - cmd.SysProcAttr = xos.CreateNewProcessGroup() - if err := cmd.Start(); err != nil { - return fmt.Errorf("could not start encore daemon: %v", err) - } - - // Wait for it to come up - for i := 0; i < 50; i++ { - if err := ctx.Err(); err != nil { - return err - } - time.Sleep(100 * time.Millisecond) - if _, err := xos.SocketStat(socketPath); err == nil { - return nil - } - } - return fmt.Errorf("timed out waiting for daemon to start") -} - -// daemonSockPath reports the path to the Encore daemon unix socket. -func daemonSockPath() (string, error) { - cacheDir, err := os.UserCacheDir() - if err != nil { - return "", fmt.Errorf("could not determine cache dir: %v", err) - } - return filepath.Join(cacheDir, "encore", "encored.sock"), nil -} - -// setupDaemon sets up the Encore daemon if it isn't already running -// and returns a client connected to it. func setupDaemon(ctx context.Context) daemonpb.DaemonClient { - socketPath, err := daemonSockPath() - if err != nil { - fmt.Fprintln(os.Stderr, "fatal: ", err) - os.Exit(1) - } - - if _, err := xos.SocketStat(socketPath); err == nil { - // The socket exists; check that it is responsive. - if cc, err := dialDaemon(ctx, socketPath); err == nil { - // Make sure the daemon is running an up-to-date version; - // restart it otherwise. - cl := daemonpb.NewDaemonClient(cc) - if resp, err := cl.Version(ctx, &empty.Empty{}); err == nil { - if semver.Compare(Version, resp.Version) >= 0 { - return cl - } - fmt.Fprintf(os.Stderr, "encore: daemon is running an outdated version (%s), restarting.\n", resp.Version) - } - } - // Remove the socket file which triggers the daemon to exit. 
- os.Remove(socketPath) - } - - // Start the daemon. - if err := daemonize(ctx); err != nil { - fatal("starting daemon: ", err) - } - cc, err := dialDaemon(ctx, socketPath) - if err != nil { - fatal("dialing daemon: ", err) - } - return daemonpb.NewDaemonClient(cc) + return cmdutil.ConnectDaemon(ctx) } -func dialDaemon(ctx context.Context, socketPath string) (*grpc.ClientConn, error) { - ctx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) - defer cancel() - - dialer := func(ctx context.Context, addr string) (net.Conn, error) { - return (&net.Dialer{}).DialContext(ctx, "unix", socketPath) - } - return grpc.DialContext(ctx, "", - grpc.WithInsecure(), - grpc.WithBlock(), - grpc.WithContextDialer(dialer)) +var daemonEnvCmd = &cobra.Command{ + Use: "env", + Short: "Prints Encore environment information", + Run: func(cc *cobra.Command, args []string) { + envs := env.List() + for _, e := range envs { + fmt.Println(e) + } + }, } diff --git a/cli/cmd/encore/daemon/daemon.go b/cli/cmd/encore/daemon/daemon.go index 07bd91254f..2ce07e859e 100644 --- a/cli/cmd/encore/daemon/daemon.go +++ b/cli/cmd/encore/daemon/daemon.go @@ -1,72 +1,120 @@ package daemon import ( + "context" + "database/sql" + "embed" + _ "embed" // for go:embed "fmt" "io" + "io/fs" "net" "net/http" + "net/http/pprof" + "net/netip" "os" + "os/signal" "path/filepath" + "strconv" + "strings" + "syscall" "time" + "github.com/cenkalti/backoff/v4" + "github.com/cockroachdb/errors" + "github.com/golang-migrate/migrate/v4" + "github.com/golang-migrate/migrate/v4/database" + "github.com/golang-migrate/migrate/v4/database/sqlite3" + "github.com/golang-migrate/migrate/v4/source/iofs" + _ "github.com/mattn/go-sqlite3" // for "sqlite3" driver + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "google.golang.org/genproto/googleapis/rpc/errdetails" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "encr.dev/cli/daemon" + "encr.dev/cli/daemon/apps" 
"encr.dev/cli/daemon/dash" + "encr.dev/cli/daemon/engine" + "encr.dev/cli/daemon/engine/trace2" + "encr.dev/cli/daemon/engine/trace2/sqlite" + "encr.dev/cli/daemon/mcp" + "encr.dev/cli/daemon/namespace" + "encr.dev/cli/daemon/objects" "encr.dev/cli/daemon/run" - "encr.dev/cli/daemon/runtime" - "encr.dev/cli/daemon/runtime/trace" "encr.dev/cli/daemon/secret" "encr.dev/cli/daemon/sqldb" - "encr.dev/cli/internal/conf" - "encr.dev/cli/internal/xos" + "encr.dev/cli/daemon/sqldb/docker" + "encr.dev/cli/daemon/sqldb/external" + "encr.dev/internal/conf" + "encr.dev/internal/env" + "encr.dev/pkg/eerror" + "encr.dev/pkg/option" + "encr.dev/pkg/watcher" + "encr.dev/pkg/xos" daemonpb "encr.dev/proto/encore/daemon" - "encr.dev/proto/encore/server/remote" - "github.com/rs/zerolog" - "github.com/rs/zerolog/log" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/oauth" - "google.golang.org/grpc/keepalive" ) // Main runs the daemon. -func Main(version string) { - if err := runMain(version); err != nil { +func Main() { + watcher.BumpRLimitSoftToHardLimit() + + if err := redirectLogOutput(); err != nil { + log.Error().Err(err).Msg("could not setup daemon log file, skipping") + } + if err := runMain(); err != nil { log.Fatal().Err(err).Msg("daemon failed") } } -func runMain(version string) (err error) { - // xit receives signals from the different subsystems +func runMain() (err error) { + ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT) + defer cancel() + + // exit receives signals from the different subsystems // that something went wrong and it's time to exit. // Sending nil indicates it's time to gracefully exit. 
exit := make(chan error) - d := &Daemon{exit: exit, Version: version} + d := &Daemon{dev: conf.DevDaemon, exit: exit} defer handleBailout(&err) defer d.closeAll() - d.init() + d.init(ctx) d.serve() - return <-exit + select { + case err := <-exit: + return err + case <-ctx.Done(): + return nil + } } // Daemon orchestrates setting up the different daemon subsystems. type Daemon struct { - Log zerolog.Logger - Daemon *net.UnixListener - Runtime *net.TCPListener - DBProxy *net.TCPListener - Dash *net.TCPListener - Version string - - Remote remote.RemoteClient - Secret *secret.Manager - RunMgr *run.Manager - ClusterMgr *sqldb.ClusterManager - Trace *trace.Store - DashSrv *dash.Server - Server *daemon.Server + Daemon *net.UnixListener + Runtime *retryingTCPListener + DBProxy *retryingTCPListener + Dash *retryingTCPListener + Debug *retryingTCPListener + ObjectStorage *retryingTCPListener + MCP *retryingTCPListener + EncoreDB *sql.DB + + Apps *apps.Manager + Secret *secret.Manager + RunMgr *run.Manager + NS *namespace.Manager + ClusterMgr *sqldb.ClusterManager + ObjectsMgr *objects.ClusterManager + MCPMgr *mcp.Manager + PublicBuckets *objects.PublicBucketServer + Trace trace2.Store + Server *daemon.Server + dev bool // whether we're in development mode // exit is a channel that shuts down the daemon when sent on. // A nil error indicates graceful exit. 
@@ -76,24 +124,66 @@ type Daemon struct { close []io.Closer } -func (d *Daemon) init() { +func (d *Daemon) init(ctx context.Context) { d.Daemon = d.listenDaemonSocket() - d.Runtime = d.listenTCP() - d.DBProxy = d.listenTCP() - d.Dash = d.listenTCP() - - d.Trace = trace.NewStore() - d.ClusterMgr = sqldb.NewClusterManager() - d.Remote = d.setupRemoteClient() - d.Secret = secret.New(d.Remote) + d.Dash = d.listenTCPRetry("dashboard", env.EncoreDevDashListenAddr(), 9400) + d.DBProxy = d.listenTCPRetry("dbproxy", option.None[string](), 9500) + d.Runtime = d.listenTCPRetry("runtime", option.None[string](), 9600) + d.Debug = d.listenTCPRetry("debug", option.None[string](), 9700) + d.ObjectStorage = d.listenTCPRetry("objectstorage", env.EncoreObjectStorageListAddr(), 9800) + d.MCP = d.listenTCPRetry("mcp", env.EncoreMCPSSEListenAddr(), 9900) + d.EncoreDB = d.openDB() + + d.Apps = apps.NewManager(d.EncoreDB) + d.close = append(d.close, d.Apps) + + // If ENCORE_SQLDB_HOST is set, use the external cluster instead of + // creating our own docker container cluster. 
+ var sqldbDriver sqldb.Driver = &docker.Driver{} + if host := os.Getenv("ENCORE_SQLDB_HOST"); host != "" { + sqldbDriver = &external.Driver{ + Host: host, + Database: os.Getenv("ENCORE_SQLDB_DATABASE"), + SuperuserUsername: os.Getenv("ENCORE_SQLDB_USER"), + SuperuserPassword: os.Getenv("ENCORE_SQLDB_PASSWORD"), + } + log.Info().Msgf("using external postgres cluster: %s", host) + } + + d.NS = namespace.NewManager(d.EncoreDB) + d.Secret = secret.New() + d.ClusterMgr = sqldb.NewClusterManager(sqldbDriver, d.Apps, d.NS, d.Secret) + d.ObjectsMgr = objects.NewClusterManager(d.NS) + d.PublicBuckets = objects.NewPublicBucketServer("http://"+d.ObjectStorage.ClientAddr(), d.ObjectsMgr.PersistentStoreFallback) + + traceStore := sqlite.New(d.EncoreDB) + go traceStore.CleanEvery(ctx, 1*time.Minute, 500, 100, 10000) + d.Trace = traceStore + d.RunMgr = &run.Manager{ - RuntimePort: tcpPort(d.Runtime), - DBProxyPort: tcpPort(d.DBProxy), - DashPort: tcpPort(d.Dash), - Secret: d.Secret, + RuntimePort: d.Runtime.Port(), + DBProxyPort: d.DBProxy.Port(), + DashBaseURL: fmt.Sprintf("http://%s", d.Dash.ClientAddr()), + Secret: d.Secret, + ClusterMgr: d.ClusterMgr, + ObjectsMgr: d.ObjectsMgr, + PublicBuckets: d.PublicBuckets, } - d.DashSrv = dash.NewServer(d.RunMgr, d.Trace) - d.Server = daemon.New(d.Version, d.RunMgr, d.ClusterMgr, d.Secret, d.Remote) + d.MCPMgr = mcp.NewManager( + d.Apps, + d.ClusterMgr, + d.NS, + d.Trace, + d.RunMgr, + fmt.Sprintf("http://%s", d.MCP.ClientAddr()), + ) + + // Register namespace deletion handlers. 
+ d.NS.RegisterDeletionHandler(d.ClusterMgr) + d.NS.RegisterDeletionHandler(d.RunMgr) + d.NS.RegisterDeletionHandler(d.ObjectsMgr) + + d.Server = daemon.New(d.Apps, d.RunMgr, d.ClusterMgr, d.Secret, d.NS, d.MCPMgr) } func (d *Daemon) serve() { @@ -101,6 +191,9 @@ func (d *Daemon) serve() { go d.serveRuntime() go d.serveDBProxy() go d.serveDash() + go d.serveDebug() + go d.serveObjects() + go d.serveMCP() } // listenDaemonSocket listens on the encored.sock UNIX socket @@ -117,7 +210,7 @@ func (d *Daemon) listenDaemonSocket() *net.UnixListener { // If the daemon socket already exists, remove it so we can take over listening. if _, err := xos.SocketStat(socketPath); err == nil { - os.Remove(socketPath) + _ = os.Remove(socketPath) } ln, err := net.ListenUnix("unix", &net.UnixAddr{Name: socketPath, Net: "unix"}) if err != nil { @@ -132,34 +225,44 @@ func (d *Daemon) listenDaemonSocket() *net.UnixListener { return ln } -// setupRemoteClient sets up a grpc client to Encore's backend service. -func (d *Daemon) setupRemoteClient() remote.RemoteClient { - ts := &conf.TokenSource{} - dialOpts := []grpc.DialOption{ - grpc.WithTransportCredentials(credentials.NewTLS(nil)), - grpc.WithPerRPCCredentials(oauth.TokenSource{TokenSource: ts}), - grpc.WithKeepaliveParams(keepalive.ClientParameters{ - Time: 20 * time.Second, - }), - } - conn, err := grpc.Dial("remote.encoreapis.com:443", dialOpts...) 
+func failedPreconditionError(msg, typ, desc string) error { + st, err := status.New(codes.FailedPrecondition, msg).WithDetails( + &errdetails.PreconditionFailure{ + Violations: []*errdetails.PreconditionFailure_Violation{ + { + Type: typ, + Description: desc, + }, + }, + }, + ) if err != nil { - fatalf("failed to dial encore server: %v", err) + panic(err) } - d.closeOnExit(conn) - return remote.NewRemoteClient(conn) + return st.Err() +} + +func ErrInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) { + resp, err = handler(ctx, req) + if errors.Is(err, conf.ErrInvalidRefreshToken) { + return nil, failedPreconditionError("invalid refresh token", "INVALID_REFRESH_TOKEN", "invalid refresh token") + } else if errors.Is(err, conf.ErrNotLoggedIn) { + return nil, status.Error(codes.Unauthenticated, "not logged in") + } + return resp, err } func (d *Daemon) serveDaemon() { log.Info().Stringer("addr", d.Daemon.Addr()).Msg("serving daemon") - srv := grpc.NewServer() + srv := grpc.NewServer(grpc.UnaryInterceptor(ErrInterceptor)) daemonpb.RegisterDaemonServer(srv, d.Server) d.exit <- srv.Serve(d.Daemon) } func (d *Daemon) serveRuntime() { log.Info().Stringer("addr", d.Runtime.Addr()).Msg("serving runtime") - srv := runtime.NewServer(d.RunMgr, d.Trace, d.Remote) + rec := trace2.NewRecorder(d.Trace) + srv := engine.NewServer(d.RunMgr, rec) d.exit <- http.Serve(d.Runtime, srv) } @@ -168,24 +271,139 @@ func (d *Daemon) serveDBProxy() { d.exit <- d.ClusterMgr.ServeProxy(d.DBProxy) } +func (d *Daemon) serveMCP() { + log.Info().Stringer("addr", d.MCP.Addr()).Msg("serving mcp") + d.exit <- d.MCPMgr.Serve(d.MCP) +} + +func (d *Daemon) serveObjects() { + log.Info().Stringer("addr", d.ObjectStorage.Addr()).Msg("serving object storage") + d.exit <- d.PublicBuckets.Serve(d.ObjectStorage) +} + func (d *Daemon) serveDash() { log.Info().Stringer("addr", d.Dash.Addr()).Msg("serving dash") - srv := 
dash.NewServer(d.RunMgr, d.Trace) + srv := dash.NewServer(d.Apps, d.RunMgr, d.NS, d.Trace, d.Dash.Port()) d.exit <- http.Serve(d.Dash, srv) } -// listenTCP listens for TCP connections on a random port on localhost. -func (d *Daemon) listenTCP() *net.TCPListener { - ln, err := net.Listen("tcp", "127.0.0.1:0") +func (d *Daemon) serveDebug() { + log.Info().Stringer("addr", d.Debug.Addr()).Msg("serving debug") + + mux := http.NewServeMux() + mux.HandleFunc("/debug/pprof/", pprof.Index) + mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + mux.HandleFunc("/debug/pprof/profile", pprof.Profile) + mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + mux.HandleFunc("/debug/pprof/trace", pprof.Trace) + d.exit <- http.Serve(d.Debug, mux) +} + +// listenTCPRetry listens for TCP connections on the given port, retrying +// in the background if it's already in use. +func (d *Daemon) listenTCPRetry(component string, addrOverride option.Option[string], defaultPort uint16) *retryingTCPListener { + addr, err := parseInterface(addrOverride.GetOrElse("127.0.0.1:0")) if err != nil { - fatal(err) + log.Fatal().Str("component", component).Err(err).Msg("failed to parse interface") + } + if addr.Port() == 0 { + addr = netip.AddrPortFrom(addr.Addr(), defaultPort) } + ln := listenLocalhostTCP(component, addr) d.closeOnExit(ln) - return ln.(*net.TCPListener) + return ln +} + +func (d *Daemon) openDB() *sql.DB { + dir, err := conf.Dir() + if err != nil { + fatal(err) + } else if err := os.MkdirAll(dir, 0755); err != nil { + fatal(err) + } + + dbPath := filepath.Join(dir, "encore.db") + + // Create the database file if it doesn't exist, as + // we've observed some failures to open the database file when it doesn't already exist. 
+ if _, err := os.Stat(dbPath); os.IsNotExist(err) { + if f, err := os.OpenFile(dbPath, os.O_CREATE|os.O_WRONLY, 0600); err == nil { + _ = f.Close() + } + } + + db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&_journal=wal&_txlock=immediate", dbPath)) + if err != nil { + fatal(err) + } + + // Initialize db schema + if err := d.runDBMigrations(db); err != nil { + fatalf("unable to migrate management database: %v", err) + } + d.closeOnExit(db) + + return db } -func tcpPort(ln net.Listener) int { - return ln.Addr().(*net.TCPAddr).Port +//go:embed migrations +var dbMigrations embed.FS + +func (d *Daemon) runDBMigrations(db *sql.DB) error { + { + // Convert old-style schema definition to golang-migrate, if necessary. + var isLegacy bool + err := db.QueryRow(` + SELECT COUNT(*) > 0 FROM pragma_table_info('schema_migrations') WHERE name = 'dummy' + `).Scan(&isLegacy) + if err != nil { + return err + } else if isLegacy { + _, _ = db.Exec("DROP TABLE schema_migrations;") + } + } + + src, err := iofs.New(dbMigrations, "migrations") + if err != nil { + return fmt.Errorf("read db migrations: %v", err) + } + instance, err := sqlite3.WithInstance(db, &sqlite3.Config{}) + if err != nil { + return fmt.Errorf("initialize migration instance: %v", err) + } + m, err := migrate.NewWithInstance("iofs", src, "encore", instance) + if err != nil { + return fmt.Errorf("setup migrate instance: %v", err) + } + + err = m.Up() + if errors.Is(err, migrate.ErrNoChange) { + return nil + } + + // If we have a dirty migration, reset the dirty flag and try again. + // This is safe since all migrations run inside transactions. + var dirty migrate.ErrDirty + if errors.As(err, &dirty) { + // Find the version that preceded the dirty version so + // we can force the migration to that version and then + // re-apply the migration. 
+ var prevVer uint + prevVer, err = src.Prev(uint(dirty.Version)) + targetVer := int(prevVer) + if errors.Is(err, fs.ErrNotExist) { + // No previous migration exists + targetVer = database.NilVersion + } else if err != nil { + return errors.Wrap(err, "failed to find previous version") + } + + if err = m.Force(targetVer); err == nil { + err = m.Up() + } + } + + return err } // detectSocketClose polls for the unix socket at socketPath to be removed @@ -205,7 +423,7 @@ func detectSocketClose(ln *net.UnixListener, socketPath string) error { for { time.Sleep(200 * time.Millisecond) fi, err := xos.SocketStat(socketPath) - if os.IsNotExist(err) { + if errors.Is(err, fs.ErrNotExist) { // Socket was removed; don't remove it again return nil } else if err != nil { @@ -228,7 +446,7 @@ func (d *Daemon) closeOnExit(c io.Closer) { func (d *Daemon) closeAll() { for _, c := range d.close { - c.Close() + _ = c.Close() } } @@ -253,3 +471,199 @@ func handleBailout(err *error) { } } } + +// redirectLogOutput redirects the global logger to also write to a file. +func redirectLogOutput() error { + logPath := env.EncoreDaemonLogPath() + if err := os.MkdirAll(filepath.Dir(logPath), 0755); err != nil { + return err + } + f, err := os.OpenFile(logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600) + if err != nil { + return err + } + log.Info().Msgf("writing output to %s", logPath) + + zerolog.TimeFieldFormat = time.RFC3339Nano + consoleWriter := zerolog.ConsoleWriter{ + Out: os.Stderr, + FieldsExclude: []string{zerolog.ErrorStackFieldName}, + } + consoleWriter.FormatExtra = eerror.ZeroLogConsoleExtraFormatter + consoleWriter.TimeFormat = time.TimeOnly + zerolog.ErrorStackMarshaler = eerror.ZeroLogStackMarshaller + log.Logger = log.With().Caller().Stack().Logger().Output(io.MultiWriter(consoleWriter, f)) + return nil +} + +// retryingTCPListener is a TCP listener that attempts multiple times +// to listen on a given port. 
It is designed to handle race conditions +// between multiple daemon processes handing off to each other +// and the port still being in use momentarily. +type retryingTCPListener struct { + component string + addr netip.AddrPort + ctx context.Context + cancel func() // call to cancel ctx + + // doneListening is closed when the underlying listener is open, + // or it gave up due to an error. + doneListening chan struct{} + underlying net.Listener + listenErr error +} + +func listenLocalhostTCP(component string, addr netip.AddrPort) *retryingTCPListener { + ctx, cancel := context.WithCancel(context.Background()) + ln := &retryingTCPListener{ + component: component, + addr: addr, + ctx: ctx, + cancel: cancel, + doneListening: make(chan struct{}), + } + go ln.listen() + return ln +} + +func (ln *retryingTCPListener) Accept() (net.Conn, error) { + select { + case <-ln.ctx.Done(): + return nil, net.ErrClosed + case <-ln.doneListening: + if ln.listenErr != nil { + return nil, ln.listenErr + } + return ln.underlying.Accept() + } +} + +func (ln *retryingTCPListener) Close() error { + ln.cancel() + select { + case <-ln.doneListening: + if ln.listenErr == nil { + return ln.underlying.Close() + } + default: + } + return nil +} + +func (ln *retryingTCPListener) Addr() net.Addr { + return &net.TCPAddr{IP: net.IP(ln.addr.Addr().AsSlice()), Port: int(ln.addr.Port())} +} + +func (ln *retryingTCPListener) ClientAddr() string { + // If our addr is 0.0.0.0 or the ipv6 equivalent, return 127.0.0.1 instead + // so that clients can connect to us. 
+ if ln.addr.Addr().IsUnspecified() { + if ln.addr.Addr().Is6() { + return fmt.Sprintf("[::1]:%d", ln.addr.Port()) + } + return fmt.Sprintf("127.0.0.1:%d", ln.addr.Port()) + } + return ln.addr.String() +} + +func (ln *retryingTCPListener) Port() int { + return int(ln.addr.Port()) +} + +func (ln *retryingTCPListener) listen() { + defer close(ln.doneListening) + + logger := log.With().Str("component", ln.component).Int("port", ln.Port()).Logger() + addr := ln.addr.String() + + b := backoff.NewExponentialBackOff() + b.InitialInterval = 50 * time.Millisecond + b.MaxInterval = 500 * time.Millisecond + b.MaxElapsedTime = 5 * time.Second + + ln.listenErr = backoff.Retry(func() (err error) { + if err := ln.ctx.Err(); err != nil { + return backoff.Permanent(err) + } + ln.underlying, err = net.Listen("tcp", addr) + if err != nil { + logger.Error().Err(ln.listenErr).Msg("unable to listen, retrying") + } + return err + }, b) + + if ln.listenErr != nil { + logger.Error().Err(ln.listenErr).Msg("unable to listen, giving up") + } else { + logger.Info().Msg("listening on port") + } +} + +func parseInterface(s string) (netip.AddrPort, error) { + addr, portStr, _, err := splitAddrPort(s) + if err != nil { + return netip.AddrPort{}, err + } + + port, err := strconv.ParseUint(portStr, 10, 16) + if err != nil { + return netip.AddrPort{}, err + } + + // Is addr a valid ip? If so we're done. + if ip, err := netip.ParseAddr(addr); err == nil { + return netip.AddrPortFrom(ip, uint16(port)), nil + } + + // Otherwise perform name resolution. + ips, err := net.LookupIP(addr) + if err != nil { + return netip.AddrPort{}, err + } + if len(ips) == 0 { + return netip.AddrPort{}, fmt.Errorf("no IP addresses found for %s", addr) + } + + // Prefer IPv4 addresses. 
+ for _, ip := range ips { + if ip.To4() != nil { + if addr, err := netip.ParseAddr(ip.String()); err == nil { + return netip.AddrPortFrom(addr, uint16(port)), nil + } + } + } + + if addr, err := netip.ParseAddr(ips[0].String()); err == nil { + return netip.AddrPortFrom(addr, uint16(port)), nil + } + return netip.AddrPort{}, fmt.Errorf("unable to parse IP address %s", addr) +} + +// splitAddrPort splits s into an IP address string and a port +// string. It splits strings shaped like "foo:bar" or "[foo]:bar", +// without further validating the substrings. v6 indicates whether the +// ip string should parse as an IPv6 address or an IPv4 address, in +// order for s to be a valid ip:port string. +func splitAddrPort(s string) (ip, port string, v6 bool, err error) { + i := strings.LastIndexByte(s, ':') + if i == -1 { + return "", "", false, errors.New("not an ip:port") + } + + ip, port = s[:i], s[i+1:] + if len(ip) == 0 { + return "", "", false, errors.New("no IP") + } + if len(port) == 0 { + return "", "", false, errors.New("no port") + } + if ip[0] == '[' { + if len(ip) < 2 || ip[len(ip)-1] != ']' { + return "", "", false, errors.New("missing ]") + } + ip = ip[1 : len(ip)-1] + v6 = true + } + + return ip, port, v6, nil +} diff --git a/cli/cmd/encore/daemon/migrations/1_initial_schema.up.sql b/cli/cmd/encore/daemon/migrations/1_initial_schema.up.sql new file mode 100644 index 0000000000..d28954ba14 --- /dev/null +++ b/cli/cmd/encore/daemon/migrations/1_initial_schema.up.sql @@ -0,0 +1,40 @@ +CREATE TABLE IF NOT EXISTS app ( + root TEXT PRIMARY KEY, + local_id TEXT NOT NULL, + platform_id TEXT NULL, -- NULL if not linked + updated_at TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS trace_event ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + app_id TEXT NOT NULL, -- platform_id or local_id + trace_id TEXT NOT NULL, + span_id TEXT NOT NULL, + event_data TEXT NOT NULL -- json +); + +CREATE INDEX IF NOT EXISTS trace_event_span_key ON trace_event (trace_id, span_id); + +CREATE 
-- namespace tracks local infrastructure namespaces: independent sets of
-- local infrastructure for a single app.
CREATE TABLE IF NOT EXISTS namespace (
    id TEXT PRIMARY KEY, -- uuid
    app_id TEXT NOT NULL, -- platform_id or local_id
    name TEXT NOT NULL, -- user-facing namespace name; unique per app (see UNIQUE constraint)
    active BOOL NOT NULL DEFAULT FALSE, -- whether this is the app's currently active namespace
    created_at TIMESTAMP NOT NULL,
    last_active_at TIMESTAMP NULL, -- presumably when the namespace was last active; NULL if never -- TODO confirm against daemon code
    UNIQUE (app_id, name)
);
+CREATE UNIQUE INDEX active_namespace ON namespace (app_id) WHERE active = true; diff --git a/cli/cmd/encore/daemon/migrations/3_test_tracing.up.sql b/cli/cmd/encore/daemon/migrations/3_test_tracing.up.sql new file mode 100644 index 0000000000..0ec669f547 --- /dev/null +++ b/cli/cmd/encore/daemon/migrations/3_test_tracing.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE trace_span_index ADD COLUMN test_skipped BOOLEAN NOT NULL DEFAULT FALSE; +ALTER TABLE trace_span_index ADD COLUMN src_file TEXT NULL; +ALTER TABLE trace_span_index ADD COLUMN src_line INTEGER NULL; diff --git a/cli/cmd/encore/db.go b/cli/cmd/encore/db.go index 893fbc5e1f..bcb9b8d137 100644 --- a/cli/cmd/encore/db.go +++ b/cli/cmd/encore/db.go @@ -10,9 +10,14 @@ import ( "runtime" "strings" - daemonpb "encr.dev/proto/encore/daemon" "github.com/rs/zerolog/log" "github.com/spf13/cobra" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/daemon/sqldb/docker" + daemonpb "encr.dev/proto/encore/daemon" ) var dbCmd = &cobra.Command{ @@ -20,53 +25,88 @@ var dbCmd = &cobra.Command{ Short: "Database management commands", } -var resetAll bool +var ( + resetAll bool + testDB bool + shadowDB bool + write bool + admin bool + superuser bool + nsName string +) + +func getDBRole() daemonpb.DBRole { + switch { + case superuser: + return daemonpb.DBRole_DB_ROLE_SUPERUSER + case admin: + return daemonpb.DBRole_DB_ROLE_ADMIN + case write: + return daemonpb.DBRole_DB_ROLE_WRITE + default: + return daemonpb.DBRole_DB_ROLE_READ + } +} var dbResetCmd = &cobra.Command{ - Use: "reset [servicenames...]", - Short: "Resets the databases for the given services, or the current directory if unspecified", + Use: "reset ", + Short: "Resets the databases with the given names. 
Use --all to reset all databases.", Run: func(command *cobra.Command, args []string) { - appRoot, relPath := determineAppRoot() - svcNames := args - if resetAll && len(svcNames) > 0 { - fatal("cannot specify both --all and service names") - } - if !resetAll && len(svcNames) == 0 { - pkgs, err := resolvePackages(filepath.Join(appRoot, relPath), ".") - if err != nil { - log.Fatal().Err(err).Msg("could not resolve packages") + appRoot, _ := determineAppRoot() + dbNames := args + if resetAll { + if len(dbNames) > 0 { + fatal("cannot specify both --all and database names") + } + dbNames = nil + } else { + if len(dbNames) == 0 { + fatal("no database names given") } - svcNames = []string{filepath.Base(pkgs[0])} } ctx := context.Background() daemon := setupDaemon(ctx) stream, err := daemon.DBReset(ctx, &daemonpb.DBResetRequest{ - AppRoot: appRoot, - Services: svcNames, + AppRoot: appRoot, + DatabaseNames: dbNames, + ClusterType: dbClusterType(), + Namespace: nonZeroPtr(nsName), }) if err != nil { fatal("reset databases: ", err) } - streamCommandOutput(stream) + os.Exit(cmdutil.StreamCommandOutput(stream, nil)) }, } var dbEnv string var dbShellCmd = &cobra.Command{ - Use: "shell [service-name]", + Use: "shell DATABASE_NAME [--env=] [--test|--shadow]", Short: "Connects to the database via psql shell", - Args: cobra.MaximumNArgs(1), + Long: `Defaults to connecting to your local environment. +Specify --env to connect to another environment. + +Use --test to connect to databases used for integration testing. +Use --shadow to connect to the shadow database, used for database drift detection +when using tools like Prisma. +--test and --shadow imply --env=local. 
+`, + Args: cobra.MaximumNArgs(1), + + DisableFlagsInUseLine: true, Run: func(command *cobra.Command, args []string) { appRoot, relPath := determineAppRoot() ctx := context.Background() daemon := setupDaemon(ctx) - svcName := "" + dbName := "" if len(args) > 0 { - svcName = args[0] + dbName = args[0] + // Ignore the trailing slash to support auto-completion of directory names + dbName = strings.TrimSuffix(dbName, "/") } else { // Find the enclosing service by looking for the "migrations" folder SvcNameLoop: @@ -75,24 +115,31 @@ var dbShellCmd = &cobra.Command{ if _, err := os.Stat(filepath.Join(absPath, "migrations")); err == nil { pkgs, err := resolvePackages(absPath, ".") if err == nil && len(pkgs) > 0 { - svcName = filepath.Base(pkgs[0]) + dbName = filepath.Base(pkgs[0]) break SvcNameLoop } } } - if svcName == "" { + if dbName == "" { fatal("could not find an Encore service with a database in this directory (or any of the parent directories).\n\n" + - "Note: You can specify a service name to connect to it directly using the command 'encore db shell '.") + "Note: You can specify a service name to connect to it directly using the command 'encore db shell '.") } } + if testDB || shadowDB { + dbEnv = "local" + } + resp, err := daemon.DBConnect(ctx, &daemonpb.DBConnectRequest{ - AppRoot: appRoot, - SvcName: svcName, - EnvName: dbEnv, + AppRoot: appRoot, + DbName: dbName, + EnvName: dbEnv, + ClusterType: dbClusterType(), + Namespace: nonZeroPtr(nsName), + Role: getDBRole(), }) if err != nil { - fatalf("could not connect to the database for service %s: %v", svcName, err) + fatalf("could not connect to the database for service %s: %v", dbName, err) } // If we have the psql binary, use that. 
@@ -104,15 +151,15 @@ var dbShellCmd = &cobra.Command{ fmt.Fprintln(os.Stderr, "encore: no 'psql' executable found in $PATH; using docker to run 'psql' instead.\n\nNote: install psql to hide this message.") dsn := resp.Dsn - if runtime.GOOS == "darwin" { - // Docker for Mac's networking setup requires + if runtime.GOOS == "darwin" || runtime.GOOS == "windows" { + // Docker for {Mac, Windows}'s networking setup requires // using "host.docker.internal" instead of "localhost" for _, rep := range []string{"localhost", "127.0.0.1"} { dsn = strings.Replace(dsn, rep, "host.docker.internal", -1) } } - cmd = exec.Command("docker", "run", "-it", "--rm", "--network=host", "postgres", "psql", dsn) + cmd = exec.Command("docker", "run", "-it", "--rm", "--network=host", docker.Image, "psql", dsn) } cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr @@ -126,13 +173,27 @@ var dbShellCmd = &cobra.Command{ log.Fatal().Err(err).Msg("psql failed") } }, + ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + if len(args) > 0 { + return nil, cobra.ShellCompDirectiveNoFileComp + } + return nil, cobra.ShellCompDirectiveFilterDirs + }, } var dbProxyPort int32 var dbProxyCmd = &cobra.Command{ - Use: "proxy [--env=]", + Use: "proxy [--env=] [--test|--shadow]", Short: "Sets up a proxy tunnel to the database", + Long: `Set up a proxy tunnel to a database for use with other tools. + +Use --test to connect to databases used for integration testing. +Use --shadow to connect to the shadow database, used for database drift detection +when using tools like Prisma. + +--test and --shadow imply --env=local. 
+`, Run: func(command *cobra.Command, args []string) { appRoot, _ := determineAppRoot() @@ -145,76 +206,142 @@ var dbProxyCmd = &cobra.Command{ cancel() }() + if testDB || shadowDB { + dbEnv = "local" + } + daemon := setupDaemon(ctx) stream, err := daemon.DBProxy(ctx, &daemonpb.DBProxyRequest{ - AppRoot: appRoot, - EnvName: dbEnv, - Port: dbProxyPort, + AppRoot: appRoot, + EnvName: dbEnv, + Port: dbProxyPort, + ClusterType: dbClusterType(), + Namespace: nonZeroPtr(nsName), + Role: getDBRole(), }) if err != nil { log.Fatal().Err(err).Msg("could not setup db proxy") } - streamCommandOutput(stream) + os.Exit(cmdutil.StreamCommandOutput(stream, nil)) }, } var dbConnURICmd = &cobra.Command{ - Use: "conn-uri [servicename]", + Use: "conn-uri [] [--test|--shadow]", Short: "Outputs the database connection string", - Args: cobra.MaximumNArgs(1), + Long: `Retrieve a stable connection uri for connecting to a database. + +Use --test to connect to databases used for integration testing. +Use --shadow to connect to the shadow database, used for database drift detection +when using tools like Prisma. + +--test and --shadow imply --env=local. 
+`, + Args: cobra.MaximumNArgs(1), Run: func(command *cobra.Command, args []string) { appRoot, relPath := determineAppRoot() ctx := context.Background() daemon := setupDaemon(ctx) - svcName := "" + dbName := "" if len(args) > 0 { - svcName = args[0] + dbName = args[0] } else { // Find the enclosing service by looking for the "migrations" folder - SvcNameLoop: + DBNameLoop: for p := relPath; p != "."; p = filepath.Dir(p) { absPath := filepath.Join(appRoot, p) if _, err := os.Stat(filepath.Join(absPath, "migrations")); err == nil { pkgs, err := resolvePackages(absPath, ".") if err == nil && len(pkgs) > 0 { - svcName = filepath.Base(pkgs[0]) - break SvcNameLoop + dbName = filepath.Base(pkgs[0]) + break DBNameLoop } } } - if svcName == "" { + if dbName == "" { fatal("could not find Encore service with a database in this directory (or any parent directory).\n\n" + "Note: You can specify a service name to connect to it directly using the command 'encore db conn-uri '.") } } + if testDB || shadowDB { + dbEnv = "local" + } + resp, err := daemon.DBConnect(ctx, &daemonpb.DBConnectRequest{ - AppRoot: appRoot, - SvcName: svcName, - EnvName: dbEnv, + AppRoot: appRoot, + DbName: dbName, + EnvName: dbEnv, + ClusterType: dbClusterType(), + Namespace: nonZeroPtr(nsName), + Role: getDBRole(), }) if err != nil { - fatalf("could not connect to the database for service %s: %v", svcName, err) + st, ok := status.FromError(err) + if ok { + if st.Code() == codes.NotFound { + fatalf("no such database found: %s", dbName) + } + } + fatalf("could not connect to the database for service %s: %v", dbName, err) } - fmt.Fprintln(os.Stdout, resp.Dsn) + _, _ = fmt.Fprintln(os.Stdout, resp.Dsn) }, } func init() { rootCmd.AddCommand(dbCmd) + dbResetCmd.Flags().StringVarP(&nsName, "namespace", "n", "", "Namespace to use (defaults to active namespace)") dbResetCmd.Flags().BoolVar(&resetAll, "all", false, "Reset all services in the application") + dbResetCmd.Flags().BoolVarP(&testDB, "test", "t", false, 
"Reset databases in the test cluster instead") + dbResetCmd.Flags().BoolVar(&shadowDB, "shadow", false, "Reset databases in the shadow cluster instead") dbCmd.AddCommand(dbResetCmd) + dbShellCmd.Flags().StringVarP(&nsName, "namespace", "n", "", "Namespace to use (defaults to active namespace)") dbShellCmd.Flags().StringVarP(&dbEnv, "env", "e", "local", "Environment name to connect to (such as \"prod\")") + dbShellCmd.Flags().BoolVarP(&testDB, "test", "t", false, "Connect to the integration test database (implies --env=local)") + dbShellCmd.Flags().BoolVar(&shadowDB, "shadow", false, "Connect to the shadow database (implies --env=local)") + dbShellCmd.Flags().BoolVar(&write, "write", false, "Connect with write privileges") + dbShellCmd.Flags().BoolVar(&admin, "admin", false, "Connect with admin privileges") + dbShellCmd.Flags().BoolVar(&superuser, "superuser", false, "Connect as a superuser") + dbShellCmd.MarkFlagsMutuallyExclusive("write", "admin", "superuser") dbCmd.AddCommand(dbShellCmd) + dbProxyCmd.Flags().StringVarP(&nsName, "namespace", "n", "", "Namespace to use (defaults to active namespace)") dbProxyCmd.Flags().StringVarP(&dbEnv, "env", "e", "local", "Environment name to connect to (such as \"prod\")") dbProxyCmd.Flags().Int32VarP(&dbProxyPort, "port", "p", 0, "Port to listen on (defaults to a random port)") + dbProxyCmd.Flags().BoolVarP(&testDB, "test", "t", false, "Connect to the integration test database (implies --env=local)") + dbProxyCmd.Flags().BoolVar(&shadowDB, "shadow", false, "Connect to the shadow database (implies --env=local)") + dbProxyCmd.Flags().BoolVar(&write, "write", false, "Connect with write privileges") + dbProxyCmd.Flags().BoolVar(&admin, "admin", false, "Connect with admin privileges") + dbProxyCmd.Flags().BoolVar(&superuser, "superuser", false, "Connect as a superuser") + dbProxyCmd.MarkFlagsMutuallyExclusive("write", "admin", "superuser") dbCmd.AddCommand(dbProxyCmd) + dbConnURICmd.Flags().StringVarP(&nsName, "namespace", "n", 
"", "Namespace to use (defaults to active namespace)") dbConnURICmd.Flags().StringVarP(&dbEnv, "env", "e", "local", "Environment name to connect to (such as \"prod\")") + dbConnURICmd.Flags().BoolVarP(&testDB, "test", "t", false, "Connect to the integration test database (implies --env=local)") + dbConnURICmd.Flags().BoolVar(&shadowDB, "shadow", false, "Connect to the shadow database (implies --env=local)") + dbConnURICmd.Flags().BoolVar(&write, "write", false, "Connect with write privileges") + dbConnURICmd.Flags().BoolVar(&admin, "admin", false, "Connect with admin privileges") + dbConnURICmd.Flags().BoolVar(&superuser, "superuser", false, "Connect as a superuser") + dbConnURICmd.MarkFlagsMutuallyExclusive("write", "admin", "superuser") dbCmd.AddCommand(dbConnURICmd) } + +func dbClusterType() daemonpb.DBClusterType { + if testDB && shadowDB { + fatal("cannot specify both --test and --shadow") + } + switch { + case testDB: + return daemonpb.DBClusterType_DB_CLUSTER_TYPE_TEST + case shadowDB: + return daemonpb.DBClusterType_DB_CLUSTER_TYPE_SHADOW + default: + return daemonpb.DBClusterType_DB_CLUSTER_TYPE_RUN + } +} diff --git a/cli/cmd/encore/debug.go b/cli/cmd/encore/debug.go new file mode 100644 index 0000000000..5393e043d8 --- /dev/null +++ b/cli/cmd/encore/debug.go @@ -0,0 +1,83 @@ +package main + +import ( + "context" + "os" + "os/signal" + + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + daemonpb "encr.dev/proto/encore/daemon" +) + +func init() { + debugCmd := &cobra.Command{ + Use: "debug", + Short: "debug is a collection of debug commands", + Hidden: true, + } + + format := cmdutil.Oneof{ + Value: "proto", + Allowed: []string{"proto", "json"}, + Flag: "format", + FlagShort: "f", + Desc: "Output format", + } + + toFormat := func() daemonpb.DumpMetaRequest_Format { + switch format.Value { + case "proto": + return daemonpb.DumpMetaRequest_FORMAT_PROTO + case "json": + return daemonpb.DumpMetaRequest_FORMAT_JSON + default: + return 
daemonpb.DumpMetaRequest_FORMAT_UNSPECIFIED + } + } + + var p dumpMetaParams + dumpMeta := &cobra.Command{ + Use: "meta", + Short: "Outputs the parsed metadata", + Args: cobra.NoArgs, + Run: func(cmd *cobra.Command, args []string) { + p.AppRoot, p.WorkingDir = determineAppRoot() + p.Environ = os.Environ() + p.Format = toFormat() + dumpMeta(p) + }, + } + + format.AddFlag(dumpMeta) + dumpMeta.Flags().BoolVar(&p.ParseTests, "tests", false, "Parse tests as well") + rootCmd.AddCommand(debugCmd) + debugCmd.AddCommand(dumpMeta) +} + +type dumpMetaParams struct { + AppRoot string + WorkingDir string + ParseTests bool + Format daemonpb.DumpMetaRequest_Format + Environ []string +} + +func dumpMeta(p dumpMetaParams) { + ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) + defer cancel() + + daemon := setupDaemon(ctx) + resp, err := daemon.DumpMeta(ctx, &daemonpb.DumpMetaRequest{ + AppRoot: p.AppRoot, + WorkingDir: p.WorkingDir, + ParseTests: p.ParseTests, + Environ: p.Environ, + Format: p.Format, + }) + if err != nil { + fatal(err) + } + _, _ = os.Stdout.Write(resp.Meta) +} diff --git a/cli/cmd/encore/deploy.go b/cli/cmd/encore/deploy.go new file mode 100644 index 0000000000..02db03158a --- /dev/null +++ b/cli/cmd/encore/deploy.go @@ -0,0 +1,104 @@ +package main + +import ( + "encoding/hex" + "encoding/json" + "fmt" + "strings" + + "github.com/cockroachdb/errors" + "github.com/logrusorgru/aurora/v3" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/platform" + "encr.dev/pkg/appfile" +) + +var ( + appSlug string + envName string + commit string + branch string + format = cmdutil.Oneof{ + Value: "text", + Allowed: []string{"text", "json"}, + Flag: "format", + FlagShort: "f", + Desc: "Output format", + } +) + +var deployAppCmd = &cobra.Command{ + Use: "deploy --commit COMMIT_SHA | --branch BRANCH_NAME", + Short: "Deploy an Encore app to a cloud environment", + DisableFlagsInUseLine: true, + Run: func(c *cobra.Command, 
args []string) { + if commit != "" { + hb, err := hex.DecodeString(commit) + if err != nil || len(hb) != 20 { + cmdutil.Fatalf("invalid commit: %s", commit) + } + } + if appSlug == "" { + appRoot, _, err := cmdutil.MaybeAppRoot() + if err != nil { + cmdutil.Fatalf("no app found. Run deploy inside an encore app directory or specify the app with --app") + } + appSlug, err = appfile.Slug(appRoot) + if err != nil { + cmdutil.Fatalf("no app found. Run deploy inside an encore app directory or specify the app with --app") + } + } + rollout, err := platform.Deploy(c.Context(), appSlug, envName, commit, branch) + var pErr platform.Error + if ok := errors.As(err, &pErr); ok { + switch pErr.Code { + case "app_not_found": + cmdutil.Fatalf("app not found: %s", appSlug) + case "validation": + var details platform.ValidationDetails + err := json.Unmarshal(pErr.Detail, &details) + if err != nil { + cmdutil.Fatalf("failed to deploy: %v", err) + } + switch details.Field { + case "commit": + cmdutil.Fatalf("could not find commit: %s. Is it pushed to the remote repository?", commit) + case "branch": + cmdutil.Fatalf("could not find branch: %s. 
Is it pushed to the remote repository?", branch) + case "env": + cmdutil.Fatalf("could not find environment: %s/%s", appSlug, envName) + } + } + } + if err != nil { + cmdutil.Fatalf("failed to deploy: %v", err) + } + url := fmt.Sprintf("https://app.encore.cloud/%s/deploys/%s/%s", appSlug, rollout.EnvName, strings.TrimPrefix(rollout.ID, "roll_")) + switch format.Value { + case "text": + fmt.Println(aurora.Sprintf("\n%s %s\n", aurora.Bold("Started Deploy:"), url)) + case "json": + output, _ := json.Marshal(map[string]string{ + "id": strings.TrimPrefix(rollout.ID, "roll_"), + "env": rollout.EnvName, + "app": appSlug, + "url": url, + }) + fmt.Println(string(output)) + } + }, +} + +func init() { + alphaCmd.AddCommand(deployAppCmd) + deployAppCmd.Flags().StringVar(&appSlug, "app", "", "app slug to deploy to (default current app)") + deployAppCmd.Flags().StringVarP(&envName, "env", "e", "", "environment to deploy to (default primary env)") + deployAppCmd.Flags().StringVar(&commit, "commit", "", "commit to deploy") + deployAppCmd.Flags().StringVar(&branch, "branch", "", "branch to deploy") + format.AddFlag(deployAppCmd) + _ = deployAppCmd.MarkFlagRequired("env") + deployAppCmd.MarkFlagsMutuallyExclusive("commit", "branch") + deployAppCmd.MarkFlagsOneRequired("commit", "branch") +} diff --git a/cli/cmd/encore/exec.go b/cli/cmd/encore/exec.go new file mode 100644 index 0000000000..6ecaca198c --- /dev/null +++ b/cli/cmd/encore/exec.go @@ -0,0 +1,82 @@ +package main + +import ( + "context" + "os" + "os/signal" + + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + daemonpb "encr.dev/proto/encore/daemon" +) + +var execCmd = &cobra.Command{ + Use: "exec path/to/script [args...]", + Short: "Runs executable scripts against the local Encore app", + Run: func(cmd *cobra.Command, args []string) { + if len(args) == 0 { + args = []string{"."} // current directory + } + appRoot, wd := determineAppRoot() + execScript(appRoot, wd, args) + }, 
+} +var execCmdAlpha = &cobra.Command{ + Use: "exec path/to/script [args...]", + Short: "Runs executable scripts against the local Encore app", + Hidden: true, + Deprecated: "use \"encore exec\" instead", + Run: func(cmd *cobra.Command, args []string) { + if len(args) == 0 { + args = []string{"."} // current directory + } + appRoot, wd := determineAppRoot() + execScript(appRoot, wd, args) + }, +} + +func execScript(appRoot, relWD string, args []string) { + interrupt := make(chan os.Signal, 1) + signal.Notify(interrupt, os.Interrupt) + + ctx, cancel := context.WithCancel(context.Background()) + go func() { + <-interrupt + cancel() + }() + + daemon := setupDaemon(ctx) + stream, err := daemon.ExecScript(ctx, &daemonpb.ExecScriptRequest{ + AppRoot: appRoot, + WorkingDir: relWD, + ScriptArgs: args, + Environ: os.Environ(), + TraceFile: root.TraceFile, + Namespace: nonZeroPtr(nsName), + }) + if err != nil { + fatal(err) + } + + cmdutil.ClearTerminalExceptFirstNLines(1) + code := cmdutil.StreamCommandOutput(stream, cmdutil.ConvertJSONLogs()) + os.Exit(code) +} + +var alphaCmd = &cobra.Command{ + Use: "alpha", + Short: "Pre-release functionality in alpha stage", + Hidden: true, +} + +func init() { + rootCmd.AddCommand(alphaCmd) +} + +func init() { + execCmd.Flags().StringVarP(&nsName, "namespace", "n", "", "Namespace to use (defaults to active namespace)") + alphaCmd.AddCommand(execCmdAlpha) + rootCmd.AddCommand(execCmd) +} diff --git a/cli/cmd/encore/gen.go b/cli/cmd/encore/gen.go index b18cac212b..7f9718c213 100644 --- a/cli/cmd/encore/gen.go +++ b/cli/cmd/encore/gen.go @@ -2,14 +2,18 @@ package main import ( "context" - "io/ioutil" + "errors" + "fmt" "os" - "path/filepath" - "strings" "time" - daemonpb "encr.dev/proto/encore/daemon" "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/manifest" + "encr.dev/pkg/appfile" + "encr.dev/pkg/clientgen" + daemonpb "encr.dev/proto/encore/daemon" ) func init() { @@ -20,64 +24,187 @@ func init() 
{ rootCmd.AddCommand(genCmd) var ( - output string - lang string - envName string + output string + lang string + envName string + genServiceNames []string + excludedServices []string + endpointTags []string + excludedEndpointTags []string + openAPIExcludePrivateEndpoints bool + tsSharedTypes bool + target string + tsDefaultClient string ) genClientCmd := &cobra.Command{ - Use: "client ", + Use: "client [] [--env=] [--services=foo,bar] [--excluded-services=baz,qux] [--tags=cache,mobile] [--excluded-tags=internal] [--openapi-exclude-private-endpoints]", Short: "Generates an API client for your app", - Args: cobra.ExactArgs(1), + Long: `Generates an API client for your app. + +By default generates the API based on your local environment. +Use '--env=' to generate it based on your cloud environments. + +Supported language codes are: + typescript: A TypeScript client using the Fetch API + javascript: A JavaScript client using the Fetch API + go: A Go client using net/http" + openapi: An OpenAPI specification (EXPERIMENTAL) + +By default all services with a non-private API endpoint are included. +To further narrow down the services to generate, use the '--services' flag. +`, + Args: cobra.MaximumNArgs(1), Run: func(cmd *cobra.Command, args []string) { + if target == "leap" { + lang = "typescript" + tsDefaultClient = "import.meta.env.VITE_CLIENT_TARGET" + if output == "" { + output = "../frontend/client.ts" + } + excludedServices = append(excludedServices, "frontend") + tsSharedTypes = true + } + if output == "" && lang == "" { fatal("specify at least one of --output or --lang.") } - appID := args[0] + + // Determine the app id, either from the argument or from the current directory. + var appID string + if len(args) == 0 { + // First check the encore.app file. 
+ appRoot, _, err := cmdutil.MaybeAppRoot() + if err != nil && !errors.Is(err, cmdutil.ErrNoEncoreApp) { + fatal(err) + } else if appRoot != "" { + if slug, err := appfile.Slug(appRoot); err == nil { + appID = slug + } + } + + // If we still don't have an app id, read it from the manifest. + if appID == "" { + mf, err := manifest.ReadOrCreate(appRoot) + if err != nil { + fatal(err) + } + appID = mf.AppID + if appID == "" { + appID = mf.LocalID + } + } + } else { + appID = args[0] + } if lang == "" { var ok bool - lang, ok = detectLang(output) + l, ok := clientgen.Detect(output) if !ok { fatal("could not detect language from output.\n\nNote: you can specify the language explicitly with --lang.") } + lang = string(l) + } else { + // Validate the user input for the language + l, err := clientgen.GetLang(lang) + if err != nil { + fatal(fmt.Sprintf("%s: supported languages are `typescript`, `javascript`, `go` and `openapi`", err)) + } + lang = string(l) } - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second) defer cancel() daemon := setupDaemon(ctx) + + if genServiceNames == nil { + genServiceNames = []string{"*"} + } + resp, err := daemon.GenClient(ctx, &daemonpb.GenClientRequest{ - AppId: appID, - EnvName: envName, - Lang: lang, + AppId: appID, + EnvName: envName, + Lang: lang, + Services: genServiceNames, + ExcludedServices: excludedServices, + EndpointTags: endpointTags, + ExcludedEndpointTags: excludedEndpointTags, + OpenapiExcludePrivateEndpoints: &openAPIExcludePrivateEndpoints, + TsSharedTypes: &tsSharedTypes, + TsClientTarget: &tsDefaultClient, }) if err != nil { fatal(err) } if output == "" { - os.Stdout.Write(resp.Code) + _, _ = os.Stdout.Write(resp.Code) } else { - if err := ioutil.WriteFile(output, resp.Code, 0755); err != nil { + if err := os.WriteFile(output, resp.Code, 0755); err != nil { fatal(err) } } }, + + ValidArgsFunction: cmdutil.AutoCompleteAppSlug, + 
} + + genWrappersCmd := &cobra.Command{ + Use: "wrappers", + Short: "Generates user-facing wrapper code", + Long: `Manually regenerates user-facing wrapper code. + +This is typically not something you ever need to call during regular development, +as Encore automatically regenerates the wrappers whenever the code-base changes. + +Its core use case is for CI/CD workflows where you want to run custom linters, +which may require the user-facing wrapper code to be manually generated.`, + Args: cobra.ExactArgs(0), + Run: func(cmd *cobra.Command, args []string) { + appRoot, _ := determineAppRoot() + ctx := context.Background() + daemon := setupDaemon(ctx) + _, err := daemon.GenWrappers(ctx, &daemonpb.GenWrappersRequest{ + AppRoot: appRoot, + }) + if err != nil { + fatal(err) + } else { + fmt.Println("successfully generated encore wrappers.") + } + }, } genCmd.AddCommand(genClientCmd) + genCmd.AddCommand(genWrappersCmd) + + genClientCmd.Flags().StringVarP(&lang, "lang", "l", "", "The language to generate code for (\"typescript\", \"javascript\", \"go\", and \"openapi\" are supported)") + _ = genClientCmd.RegisterFlagCompletionFunc("lang", cmdutil.AutoCompleteFromStaticList( + "typescript\tA TypeScript client using the in-browser Fetch API", + "javascript\tA JavaScript client using the in-browser Fetch API", + "go\tA Go client using net/http", + "openapi\tAn OpenAPI specification", + )) + genClientCmd.Flags().StringVarP(&output, "output", "o", "", "The filename to write the generated client code to") - genClientCmd.Flags().StringVarP(&lang, "lang", "l", "", "The language to generate code for (only \"ts\" is supported for now)") - genClientCmd.Flags().StringVarP(&envName, "env", "e", "", "The environment to fetch the API for (defaults to the primary environment)") -} + _ = genClientCmd.MarkFlagFilename("output", "go", "ts", "tsx", "js", "jsx") -func detectLang(path string) (string, bool) { - suffix := strings.ToLower(filepath.Ext(path)) - switch suffix { - case ".ts": - 
return "typescript", true - default: - return "", false - } + genClientCmd.Flags().StringVarP(&envName, "env", "e", "local", "The environment to fetch the API for (defaults to the local environment)") + _ = genClientCmd.RegisterFlagCompletionFunc("env", cmdutil.AutoCompleteEnvSlug) + + genClientCmd.Flags().StringSliceVarP(&genServiceNames, "services", "s", nil, "The names of the services to include in the output") + genClientCmd.Flags().StringSliceVarP(&excludedServices, "excluded-services", "x", nil, "The names of the services to exclude in the output") + genClientCmd.Flags().StringSliceVarP(&endpointTags, "tags", "t", nil, "The names of endpoint tags to include in the output") + genClientCmd.Flags(). + StringSliceVar(&excludedEndpointTags, "excluded-tags", nil, "The names of endpoint tags to exclude in the output") + genClientCmd.Flags(). + BoolVar(&openAPIExcludePrivateEndpoints, "openapi-exclude-private-endpoints", false, "Exclude private endpoints from the OpenAPI spec") + genClientCmd.Flags(). + BoolVar(&tsSharedTypes, "ts:shared-types", false, "Import types from ~backend instead of re-generating them") + genClientCmd.Flags().StringVar(&target, "target", "", "An optional target for the client (\"leap\")") + _ = genClientCmd.RegisterFlagCompletionFunc("target", cmdutil.AutoCompleteFromStaticList( + "leap\tA TypeScript client for apps created with Leap (https://leap.new) ", + )) } diff --git a/cli/cmd/encore/init_windows.go b/cli/cmd/encore/init_windows.go new file mode 100644 index 0000000000..2578b7a25f --- /dev/null +++ b/cli/cmd/encore/init_windows.go @@ -0,0 +1,23 @@ +//go:build windows +// +build windows + +package main + +import ( + "golang.org/x/sys/windows" +) + +// init activates virtual terminal feature on "windows", this enables colored +// terminal output. 
// setConsoleMode sets the given flag on the console mode of the given
// handle (e.g. stdout or stderr), preserving any flags already set.
// Errors from GetConsoleMode are deliberately ignored: the handle may
// not be attached to a console (e.g. when output is redirected), in
// which case there is nothing to do.
func setConsoleMode(handle windows.Handle, flag uint32) {
	var mode uint32
	if err := windows.GetConsoleMode(handle, &mode); err == nil {
		windows.SetConsoleMode(handle, mode|flag)
	}
}
// generateExecCredentials generates the Kubernetes exec credentials and writes them to stdout,
// in the client.authentication.k8s.io/v1 ExecCredential format that kubectl
// expects from an exec-based credential plugin.
//
// If an error occurs, it is written to stderr and the program exits with a non-zero exit code.
func generateExecCredentials() {
	// Get the OAuth token from the Encore API
	token, err := conf.DefaultTokenSource.Token()
	if err != nil {
		cmdutil.Fatalf("error getting token: %v", err)
	}

	// Generate the Kubernetes exec credential data structures,
	// propagating the token's expiry so kubectl knows when to re-run us.
	expiryTime := types.NewTime(token.Expiry)
	execCredentials := &types.ExecCredential{
		TypeMeta: types.TypeMeta{
			APIVersion: "client.authentication.k8s.io/v1",
			Kind:       "ExecCredential",
		},
		Status: &types.ExecCredentialStatus{
			Token:               token.AccessToken,
			ExpirationTimestamp: &expiryTime,
		},
	}

	// Marshal the exec credentials to JSON and write to stdout
	output, err := json.MarshalIndent(execCredentials, "", " ")
	if err != nil {
		cmdutil.Fatalf("error marshalling exec credentials: %v", err)
	}
	_, _ = os.Stdout.Write(output)
}
+}
+
+var (
+	k8sEnvName string
+)
+
+func init() {
+	configCmd.Flags().StringVarP(&k8sEnvName, "env", "e", "", "Environment name")
+	_ = configCmd.MarkFlagRequired("env")
+	kubernetesCmd.AddCommand(configCmd)
+}
+
+func configureForAppEnv(ctx context.Context, appID string, envName string) error {
+	appSlug, envName, clusters, err := platform.KubernetesClusters(ctx, appID, envName)
+	if err != nil {
+		return errors.Wrap(err, "unable to get Kubernetes clusters for environment")
+	}
+	if len(clusters) == 0 {
+		return errors.New("no Kubernetes clusters found for environment")
+	}
+
+	// Read the existing kubeconfig file
+	configFilePath := filepath.Join(types.HomeDir(), ".kube", "config")
+	cfg, err := readKubeConfig(configFilePath)
+	if err != nil {
+		return err
+	}
+
+	// Add the clusters
+	contextPrefix := fmt.Sprintf("encore_%s_%s", appSlug, envName)
+	authName := "encore-proxy-auth"
+	contextNames := make([]string, len(clusters))
+	for i, cluster := range clusters {
+		// Create a context name for the cluster
+		// by default we use the app slug and env name separated by an underscore (e.g. encore_myapp_prod)
+		// however if the environment has multiple clusters then we also include the cluster name (e.g. 
encore_myapp_prod_cluster1)
+		contextName := contextPrefix
+		if len(clusters) > 1 {
+			contextName += "_" + cluster.Name
+		}
+		contextNames[i] = contextName
+
+		// Add the cluster using the cluster name as the context name
+		cfg.clusters = appendOrUpdate(cfg.clusters, map[string]any{
+			"name": contextName,
+			"cluster": map[string]any{
+				"server": fmt.Sprintf("%s/k8s-api-proxy/%s/%s/", conf.APIBaseURL, cluster.EnvID, cluster.ResID),
+			},
+		})
+
+		k8sContext := map[string]any{
+			"cluster": contextName,
+			"user":    authName,
+		}
+		if cluster.DefaultNamespace != "" {
+			k8sContext["namespace"] = cluster.DefaultNamespace
+		}
+
+		cfg.contexts = appendOrUpdate(cfg.contexts, map[string]any{
+			"name":    contextName,
+			"context": k8sContext,
+		})
+	}
+
+	// Remove any old contexts or clusters
+	// We iterate in reverse, removing any not in the new list, so deletions don't shift unvisited indices
+	for i := len(cfg.contexts) - 1; i >= 0; i-- {
+		if foundContext, ok := cfg.contexts[i].(map[string]any); ok {
+			if contextName, ok := foundContext["name"].(string); ok {
+				if strings.HasPrefix(contextName, contextPrefix) && !slices.Contains(contextNames, contextName) {
+					cfg.contexts = append(cfg.contexts[:i], cfg.contexts[i+1:]...)
+				}
+			}
+		}
+	}
+	for i := len(cfg.clusters) - 1; i >= 0; i-- {
+		if foundCluster, ok := cfg.clusters[i].(map[string]any); ok {
+			if clusterName, ok := foundCluster["name"].(string); ok {
+				if strings.HasPrefix(clusterName, contextPrefix) && !slices.Contains(contextNames, clusterName) {
+					cfg.clusters = append(cfg.clusters[:i], cfg.clusters[i+1:]...)
+				}
+			}
+		}
+	}
+
+	// If we added a cluster then we need to update the encore-k8s-proxy user
+	cfg.users = appendOrUpdate(cfg.users, map[string]any{
+		"name": authName,
+		"user": map[string]any{
+			"exec": map[string]any{
+				"apiVersion":         "client.authentication.k8s.io/v1",
+				"args":               []string{"kubernetes", "exec-credentials"},
+				"command":            "encore",
+				"env":                nil,
+				"installHint":        "Install encore for use with kubectl, see https://encore.dev",
+				"interactiveMode":    "Never",
+				"provideClusterInfo": false,
+			},
+		},
+	})
+
+	// Update the current context to the first cluster for the environment
+	cfg.raw["current-context"] = contextNames[0]
+
+	if err := writeKubeConfig(configFilePath, cfg); err != nil {
+		return err
+	}
+
+	if len(clusters) == 1 {
+		_, _ = fmt.Fprintf(os.Stdout, "kubectl configured for cluster %s under context %s.\n", color.CyanString(clusters[0].Name), color.CyanString(contextNames[0]))
+	} else {
+		_, _ = fmt.Fprintf(os.Stdout, "kubectl configured for %d clusters:\n\n", len(clusters))
+
+		w := tabwriter.NewWriter(os.Stdout, 0, 0, 3, ' ', tabwriter.StripEscape)
+		_, _ = fmt.Fprint(w, "CLUSTER\tCONTEXT\tACTIVE\n")
+		for i, cluster := range clusters {
+			active := ""
+			if i == 0 {
+				active = "yes"
+			}
+			_, _ = fmt.Fprintf(w, "%s\t%s\t%s\n", cluster.Name, contextNames[i], active) // each row shows its own context, not the first one's
+		}
+		_ = w.Flush()
+	}
+
+	return nil
+}
+
+// readKubeConfig reads the existing kubeconfig file and returns a Cfg struct.
+// The result is kept as untyped as possible, so that we can easily marshal it back without losing any data.
+func readKubeConfig(file string) (*Cfg, error) {
+	b, err := os.ReadFile(file)
+	if err != nil && !errors.Is(err, fs.ErrNotExist) {
+		return nil, errors.Wrap(err, "unable to read kubeconfig file")
+	}
+
+	// Parse the existing kubeconfig YAML; a missing file leaves kubeConfig nil
+	var kubeConfig map[string]any
+	if len(b) > 0 {
+		if err = yaml.Unmarshal(b, &kubeConfig); err != nil {
+			return nil, errors.Wrap(err, "unable to parse kubeconfig file")
+		}
+	}
+
+	// Ensure the kubeConfig struct is valid
+	if kubeConfig == nil {
+		kubeConfig = map[string]any{
+			"apiVersion": "v1",
+			"kind":       "Config",
+		}
+	} else if kubeConfig["apiVersion"] != "v1" || kubeConfig["kind"] != "Config" {
+		return nil, errors.New("invalid existing kubeconfig file")
+	}
+	cfg := &Cfg{
+		raw: kubeConfig,
+	}
+
+	if clusters, ok := kubeConfig["clusters"]; ok {
+		if clusters, ok := clusters.([]any); ok {
+			cfg.clusters = clusters
+		} else {
+			return nil, errors.Newf("clusters is not an array got %T", clusters)
+		}
+	}
+
+	if users, ok := kubeConfig["users"]; ok {
+		if users, ok := users.([]any); ok {
+			cfg.users = users
+		} else {
+			return nil, errors.Newf("users is not an array got %T", users)
+		}
+	}
+
+	if contexts, ok := kubeConfig["contexts"]; ok {
+		if contexts, ok := contexts.([]any); ok {
+			cfg.contexts = contexts
+		} else {
+			return nil, errors.Newf("contexts is not an array got %T", contexts)
+		}
+	}
+
+	return cfg, nil
+}
+
+// writeKubeConfig writes the kubeconfig back to the file.
+func writeKubeConfig(file string, cfg *Cfg) error {
+	// Sync the typed slices back into the raw kubeconfig map before marshalling
+	cfg.raw["clusters"] = cfg.clusters
+	cfg.raw["users"] = cfg.users
+	cfg.raw["contexts"] = cfg.contexts
+
+	b, err := yaml.Marshal(cfg.raw)
+	if err != nil {
+		return errors.Wrap(err, "unable to marshal kubeconfig back into yaml")
+	}
+
+	// Ensure the directory exists
+	if err := os.MkdirAll(filepath.Dir(file), 0755); err != nil {
+		return errors.Wrap(err, "unable to create kubeconfig directory")
+	}
+
+	// Then write the file
+	err = xos.WriteFile(file, b, 0600)
+	if err != nil {
+		return errors.Wrap(err, "unable to write kubeconfig file")
+	}
+	return nil
+}
+
+type Cfg struct {
+	raw      map[string]any
+	clusters []any
+	users    []any
+	contexts []any
+}
+
+// appendOrUpdate looks in the array for an entry which is a map with a "name" key matching the name in val; if found
+// it updates that entry with val, otherwise it appends val to the array.
+func appendOrUpdate(dst []any, val map[string]any) []any {
+	idx := slices.IndexFunc(dst, func(entry any) bool {
+		if entry, ok := entry.(map[string]any); ok {
+			if entry["name"] == val["name"] {
+				return true
+			}
+		}
+		return false
+	})
+
+	if idx == -1 {
+		return append(dst, val)
+	} else {
+		dst[idx] = val
+		return dst
+	}
+}
diff --git a/cli/cmd/encore/k8s/kubernetes.go b/cli/cmd/encore/k8s/kubernetes.go
new file mode 100644
index 0000000000..86a21bd4e8
--- /dev/null
+++ b/cli/cmd/encore/k8s/kubernetes.go
@@ -0,0 +1,17 @@
+package k8s
+
+import (
+	"github.com/spf13/cobra"
+
+	"encr.dev/cli/cmd/encore/root"
+)
+
+var kubernetesCmd = &cobra.Command{
+	Use:     "kubernetes",
+	Short:   "Kubernetes management commands",
+	Aliases: []string{"k8s"},
+}
+
+func init() {
+	root.Cmd.AddCommand(kubernetesCmd)
+}
diff --git a/cli/cmd/encore/k8s/types/KUBERNETES_LICENSE.txt b/cli/cmd/encore/k8s/types/KUBERNETES_LICENSE.txt
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ 
b/cli/cmd/encore/k8s/types/KUBERNETES_LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/cli/cmd/encore/k8s/types/README.md b/cli/cmd/encore/k8s/types/README.md new file mode 100644 index 0000000000..038a9fcdbe --- /dev/null +++ b/cli/cmd/encore/k8s/types/README.md @@ -0,0 +1,4 @@ +# Kubernetes Types + +This package contains types copied directly from the [Kubernetes](https://github.com/kubernetes/kubernetes) project, this +is to prevent the Encore CLI needing to have a dependency on the Kubernetes project for just these types. diff --git a/cli/cmd/encore/k8s/types/clientauthentication_types.go b/cli/cmd/encore/k8s/types/clientauthentication_types.go new file mode 100644 index 0000000000..29e31d3aa4 --- /dev/null +++ b/cli/cmd/encore/k8s/types/clientauthentication_types.go @@ -0,0 +1,120 @@ +/* +Copyright 2021 The Kubernetes Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package types + +// ExecCredential is used by exec-based plugins to communicate credentials to +// HTTP transports. +type ExecCredential struct { + TypeMeta `json:",inline"` + + // Spec holds information passed to the plugin by the transport. + Spec ExecCredentialSpec `json:"spec,omitempty"` + + // Status is filled in by the plugin and holds the credentials that the transport + // should use to contact the API. + // +optional + Status *ExecCredentialStatus `json:"status,omitempty"` +} + +// ExecCredentialSpec holds request and runtime specific information provided by +// the transport. +type ExecCredentialSpec struct { + // Cluster contains information to allow an exec plugin to communicate with the + // kubernetes cluster being authenticated to. Note that Cluster is non-nil only + // when provideClusterInfo is set to true in the exec provider config (i.e., + // ExecConfig.ProvideClusterInfo). + // +optional + Cluster *Cluster `json:"cluster,omitempty"` + + // Interactive declares whether stdin has been passed to this exec plugin. + Interactive bool `json:"interactive"` +} + +// ExecCredentialStatus holds credentials for the transport to use. +// +// Token and ClientKeyData are sensitive fields. This data should only be +// transmitted in-memory between client and exec plugin process. Exec plugin +// itself should at least be protected via file permissions. 
+type ExecCredentialStatus struct { + // ExpirationTimestamp indicates a time when the provided credentials expire. + // +optional + ExpirationTimestamp *Time `json:"expirationTimestamp,omitempty"` + // Token is a bearer token used by the client for request authentication. + Token string `json:"token,omitempty" datapolicy:"token"` + // PEM-encoded client TLS certificates (including intermediates, if any). + ClientCertificateData string `json:"clientCertificateData,omitempty"` + // PEM-encoded private key for the above certificate. + ClientKeyData string `json:"clientKeyData,omitempty" datapolicy:"security-key"` +} + +// Cluster contains information to allow an exec plugin to communicate +// with the kubernetes cluster being authenticated to. +// +// To ensure that this struct contains everything someone would need to communicate +// with a kubernetes cluster (just like they would via a kubeconfig), the fields +// should shadow "k8s.io/client-go/tools/clientcmd/api/v1".Cluster, with the exception +// of CertificateAuthority, since CA data will always be passed to the plugin as bytes. +type Cluster struct { + // Server is the address of the kubernetes cluster (https://hostname:port). + Server string `json:"server"` + // TLSServerName is passed to the server for SNI and is used in the client to + // check server certificates against. If ServerName is empty, the hostname + // used to contact the server is used. + // +optional + TLSServerName string `json:"tls-server-name,omitempty"` + // InsecureSkipTLSVerify skips the validity check for the server's certificate. + // This will make your HTTPS connections insecure. + // +optional + InsecureSkipTLSVerify bool `json:"insecure-skip-tls-verify,omitempty"` + // CAData contains PEM-encoded certificate authority certificates. + // If empty, system roots should be used. 
+ // +listType=atomic + // +optional + CertificateAuthorityData []byte `json:"certificate-authority-data,omitempty"` + // ProxyURL is the URL to the proxy to be used for all requests to this + // cluster. + // +optional + ProxyURL string `json:"proxy-url,omitempty"` + // DisableCompression allows client to opt-out of response compression for all requests to the server. This is useful + // to speed up requests (specifically lists) when client-server network bandwidth is ample, by saving time on + // compression (server-side) and decompression (client-side): https://github.com/kubernetes/kubernetes/issues/112296. + // +optional + DisableCompression bool `json:"disable-compression,omitempty"` + // Config holds additional config data that is specific to the exec + // plugin with regards to the cluster being authenticated to. + // + // This data is sourced from the clientcmd Cluster object's + // extensions[client.authentication.k8s.io/exec] field: + // + // clusters: + // - name: my-cluster + // cluster: + // ... + // extensions: + // - name: client.authentication.k8s.io/exec # reserved extension name for per cluster exec config + // extension: + // audience: 06e3fbd18de8 # arbitrary config + // + // In some environments, the user config may be exactly the same across many clusters + // (i.e. call this exec plugin) minus some details that are specific to each cluster + // such as the audience. This field allows the per cluster config to be directly + // specified with the cluster info. Using this field to store secret data is not + // recommended as one of the prime benefits of exec plugins is that no secrets need + // to be stored directly in the kubeconfig. 
+ // +optional + Config RawExtension `json:"config,omitempty"` +} diff --git a/cli/cmd/encore/k8s/types/homedir.go b/cli/cmd/encore/k8s/types/homedir.go new file mode 100644 index 0000000000..c041f40c71 --- /dev/null +++ b/cli/cmd/encore/k8s/types/homedir.go @@ -0,0 +1,92 @@ +/* +Copyright 2016 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package types + +import ( + "os" + "path/filepath" + "runtime" +) + +// HomeDir returns the home directory for the current user. +// On Windows: +// 1. the first of %HOME%, %HOMEDRIVE%%HOMEPATH%, %USERPROFILE% containing a `.kube\config` file is returned. +// 2. if none of those locations contain a `.kube\config` file, the first of %HOME%, %USERPROFILE%, %HOMEDRIVE%%HOMEPATH% that exists and is writeable is returned. +// 3. if none of those locations are writeable, the first of %HOME%, %USERPROFILE%, %HOMEDRIVE%%HOMEPATH% that exists is returned. +// 4. if none of those locations exists, the first of %HOME%, %USERPROFILE%, %HOMEDRIVE%%HOMEPATH% that is set is returned. +func HomeDir() string { + if runtime.GOOS == "windows" { + home := os.Getenv("HOME") + homeDriveHomePath := "" + if homeDrive, homePath := os.Getenv("HOMEDRIVE"), os.Getenv("HOMEPATH"); len(homeDrive) > 0 && len(homePath) > 0 { + homeDriveHomePath = homeDrive + homePath + } + userProfile := os.Getenv("USERPROFILE") + + // Return first of %HOME%, %HOMEDRIVE%/%HOMEPATH%, %USERPROFILE% that contains a `.kube\config` file. 
+ // %HOMEDRIVE%/%HOMEPATH% is preferred over %USERPROFILE% for backwards-compatibility. + for _, p := range []string{home, homeDriveHomePath, userProfile} { + if len(p) == 0 { + continue + } + if _, err := os.Stat(filepath.Join(p, ".kube", "config")); err != nil { + continue + } + return p + } + + firstSetPath := "" + firstExistingPath := "" + + // Prefer %USERPROFILE% over %HOMEDRIVE%/%HOMEPATH% for compatibility with other auth-writing tools + for _, p := range []string{home, userProfile, homeDriveHomePath} { + if len(p) == 0 { + continue + } + if len(firstSetPath) == 0 { + // remember the first path that is set + firstSetPath = p + } + info, err := os.Stat(p) + if err != nil { + continue + } + if len(firstExistingPath) == 0 { + // remember the first path that exists + firstExistingPath = p + } + if info.IsDir() && info.Mode().Perm()&(1<<(uint(7))) != 0 { + // return first path that is writeable + return p + } + } + + // If none are writeable, return first location that exists + if len(firstExistingPath) > 0 { + return firstExistingPath + } + + // If none exist, return first location that is set + if len(firstSetPath) > 0 { + return firstSetPath + } + + // We've got nothing + return "" + } + return os.Getenv("HOME") +} diff --git a/cli/cmd/encore/k8s/types/meta_types.go b/cli/cmd/encore/k8s/types/meta_types.go new file mode 100644 index 0000000000..b9dadd8cfe --- /dev/null +++ b/cli/cmd/encore/k8s/types/meta_types.go @@ -0,0 +1,96 @@ +/* +Copyright 2021 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package types + +import ( + "encoding/json" + "time" +) + +// TypeMeta describes an individual object in an API response or request +// with strings representing the type of the object and its API schema version. +// Structures that are versioned or persisted should inline TypeMeta. +// +// +k8s:deepcopy-gen=false +type TypeMeta struct { + // Kind is a string value representing the REST resource this object represents. + // Servers may infer this from the endpoint the client submits requests to. + // Cannot be updated. + // In CamelCase. + // More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds + // +optional + Kind string `json:"kind,omitempty" protobuf:"bytes,1,opt,name=kind"` + + // APIVersion defines the versioned schema of this representation of an object. + // Servers should convert recognized schemas to the latest internal value, and + // may reject unrecognized values. + // More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources + // +optional + APIVersion string `json:"apiVersion,omitempty" protobuf:"bytes,2,opt,name=apiVersion"` +} + +// Time is a wrapper around time.Time which supports correct +// marshaling to YAML and JSON. Wrappers are provided for many +// of the factory methods that the time package offers. +// +// +protobuf.options.marshal=false +// +protobuf.as=Timestamp +// +protobuf.options.(gogoproto.goproto_stringer)=false +type Time struct { + time.Time `protobuf:"-"` +} + +// NewTime returns a wrapped instance of the provided time +func NewTime(time time.Time) Time { + return Time{time} +} + +// UnmarshalJSON implements the json.Unmarshaller interface. 
+func (t *Time) UnmarshalJSON(b []byte) error { + if len(b) == 4 && string(b) == "null" { + t.Time = time.Time{} + return nil + } + + var str string + err := json.Unmarshal(b, &str) + if err != nil { + return err + } + + pt, err := time.Parse(time.RFC3339, str) + if err != nil { + return err + } + + t.Time = pt.Local() + return nil +} + +// MarshalJSON implements the json.Marshaler interface. +func (t Time) MarshalJSON() ([]byte, error) { + if t.IsZero() { + // Encode unset/nil objects as JSON's "null". + return []byte("null"), nil + } + buf := make([]byte, 0, len(time.RFC3339)+2) + buf = append(buf, '"') + // time cannot contain non escapable JSON characters + buf = t.UTC().AppendFormat(buf, time.RFC3339) + buf = append(buf, '"') + return buf, nil +} diff --git a/cli/cmd/encore/k8s/types/runtime_types.go b/cli/cmd/encore/k8s/types/runtime_types.go new file mode 100644 index 0000000000..23cf5a5160 --- /dev/null +++ b/cli/cmd/encore/k8s/types/runtime_types.go @@ -0,0 +1,76 @@ +/* +Copyright 2014 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package types + +// RawExtension is used to hold extensions in external versions. +// +// To use this, make a field which has RawExtension as its type in your external, versioned +// struct, and Object in your internal struct. You also need to register your +// various plugin types. 
+// +// // Internal package: +// +// type MyAPIObject struct { +// runtime.TypeMeta `json:",inline"` +// MyPlugin runtime.Object `json:"myPlugin"` +// } +// +// type PluginA struct { +// AOption string `json:"aOption"` +// } +// +// // External package: +// +// type MyAPIObject struct { +// runtime.TypeMeta `json:",inline"` +// MyPlugin runtime.RawExtension `json:"myPlugin"` +// } +// +// type PluginA struct { +// AOption string `json:"aOption"` +// } +// +// // On the wire, the JSON will look something like this: +// +// { +// "kind":"MyAPIObject", +// "apiVersion":"v1", +// "myPlugin": { +// "kind":"PluginA", +// "aOption":"foo", +// }, +// } +// +// So what happens? Decode first uses json or yaml to unmarshal the serialized data into +// your external MyAPIObject. That causes the raw JSON to be stored, but not unpacked. +// The next step is to copy (using pkg/conversion) into the internal struct. The runtime +// package's DefaultScheme has conversion functions installed which will unpack the +// JSON stored in RawExtension, turning it into the correct object type, and storing it +// in the Object. (TODO: In the case where the object is of an unknown type, a +// runtime.Unknown object will be created and stored.) +// +// +k8s:deepcopy-gen=true +// +protobuf=true +// +k8s:openapi-gen=true +type RawExtension struct { + // Raw is the underlying serialization of this object. + // + // TODO: Determine how to detect ContentType and ContentEncoding of 'Raw' data. + Raw []byte `json:"-" protobuf:"bytes,1,opt,name=raw"` + // Object can hold a representation of this extension - useful for working with versioned + // structs. 
+ Object any `json:"-"` +} diff --git a/cli/cmd/encore/logs.go b/cli/cmd/encore/logs.go index d9bb39e1a8..307d7e17c3 100644 --- a/cli/cmd/encore/logs.go +++ b/cli/cmd/encore/logs.go @@ -3,26 +3,33 @@ package main import ( "bytes" "context" + "encoding/json" + "errors" "fmt" - "io" "os" "os/signal" + "time" - daemonpb "encr.dev/proto/encore/daemon" + "github.com/gorilla/websocket" + "github.com/logrusorgru/aurora/v3" "github.com/rs/zerolog" "github.com/spf13/cobra" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" + + "encr.dev/cli/internal/platform" + "encr.dev/pkg/appfile" ) var ( - logsEnv string - logsJSON bool + logsEnv string + logsJSON bool + logsQuiet bool ) var logsCmd = &cobra.Command{ - Use: "logs", + Use: "logs [--env=prod] [--json]", Short: "Streams logs from your application", + + DisableFlagsInUseLine: true, Run: func(cmd *cobra.Command, args []string) { appRoot, _ := determineAppRoot() streamLogs(appRoot, logsEnv) @@ -30,51 +37,104 @@ var logsCmd = &cobra.Command{ } func streamLogs(appRoot, envName string) { - interrupt := make(chan os.Signal, 1) - signal.Notify(interrupt, os.Interrupt) + ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) + defer cancel() + appSlug, err := appfile.Slug(appRoot) + if err != nil { + fatal(err) + } else if appSlug == "" { + fatal("app is not linked with Encore Cloud") + } + + if envName == "" { + envName = "@primary" + } + logs, err := platform.EnvLogs(ctx, appSlug, envName) + if err != nil { + var e platform.Error + if errors.As(err, &e) { + switch e.Code { + case "env_not_found": + fatalf("environment %q not found", envName) + } + } + fatal(err) + } - ctx, cancel := context.WithCancel(context.Background()) go func() { - <-interrupt - cancel() + <-ctx.Done() + logs.Close() }() - daemon := setupDaemon(ctx) - stream, err := daemon.Logs(ctx, &daemonpb.LogsRequest{ - AppRoot: appRoot, - EnvName: envName, - }) - if err != nil { - fatal("could not stream logs: ", err) + // Use the same 
configuration as the runtime + zerolog.TimeFieldFormat = time.RFC3339Nano + + if !logsQuiet { + fmt.Println(aurora.Gray(12, "Connected, waiting for logs...")) } cw := zerolog.NewConsoleWriter() for { - msg, err := stream.Recv() - if err == io.EOF || status.Code(err) == codes.Canceled { + _, message, err := logs.ReadMessage() + if err != nil { + if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway, websocket.CloseAbnormalClosure) { + fatal("the server closed the connection unexpectedly.") + } return - } else if err != nil { - fatal(err) } - for _, line := range msg.Lines { + + lines := bytes.Split(message, []byte("\n")) + for _, line := range lines { // Pretty-print logs if requested and it looks like a JSON log line if !logsJSON && bytes.HasPrefix(line, []byte{'{'}) { - if _, err := cw.Write(line); err != nil { + if _, err := cw.Write(mapCloudFieldNamesToExpected(line)); err != nil { // Fall back to regular stdout in case of error os.Stdout.Write(line) + os.Stdout.Write([]byte("\n")) } } else { os.Stdout.Write(line) + os.Stdout.Write([]byte("\n")) } } - if msg.DropNotice { - fmt.Fprintln(os.Stderr, "--- NOTICE: log lines were not sent due to high volume or slow reader ---") - } } } +// mapCloudFieldNamesToExpected detects if we're logging with GCP style logging and then swaps +// the field names to what is expected by zerolog +func mapCloudFieldNamesToExpected(jsonBytes []byte) []byte { + unmarshaled := map[string]any{} + err := json.Unmarshal(jsonBytes, &unmarshaled) + if err != nil { + return jsonBytes + } + + _, hasSeverity := unmarshaled["severity"] + _, hasExpectedLevelField := unmarshaled[zerolog.LevelFieldName] + _, hasTimestamp := unmarshaled["timestamp"] + _, hasExpectedTimeField := unmarshaled[zerolog.TimestampFieldName] + + // GCP logs have a severity field and a timestamp field and not the default level and timestamp + if hasSeverity && !hasExpectedLevelField && hasTimestamp && !hasExpectedTimeField { + unmarshaled[zerolog.LevelFieldName] = 
unmarshaled["severity"] + delete(unmarshaled, "severity") + unmarshaled[zerolog.TimestampFieldName] = unmarshaled["timestamp"] + delete(unmarshaled, "timestamp") + } else { + // No changes, return the original bytes unmodified + return jsonBytes + } + + newBytes, err := json.Marshal(unmarshaled) + if err != nil { + return jsonBytes + } + return newBytes +} + func init() { rootCmd.AddCommand(logsCmd) - logsCmd.Flags().StringVarP(&logsEnv, "env", "e", "", "Environment name to stream logs from (defaults to the production environment)") + logsCmd.Flags().StringVarP(&logsEnv, "env", "e", "", "Environment name to stream logs from (defaults to the primary environment)") logsCmd.Flags().BoolVar(&logsJSON, "json", false, "Whether to print logs in raw JSON format") + logsCmd.Flags().BoolVarP(&logsQuiet, "quiet", "q", false, "Whether to print initial message when the command is waiting for logs") } diff --git a/cli/cmd/encore/main.go b/cli/cmd/encore/main.go index 4b5f845888..ffb4dc4a77 100644 --- a/cli/cmd/encore/main.go +++ b/cli/cmd/encore/main.go @@ -2,28 +2,29 @@ package main import ( "fmt" - "io" "os" - "path/filepath" - daemonpb "encr.dev/proto/encore/daemon" - "github.com/fatih/color" "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "github.com/spf13/cobra" "golang.org/x/tools/go/packages" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + + // Register commands + _ "encr.dev/cli/cmd/encore/app" + _ "encr.dev/cli/cmd/encore/config" + _ "encr.dev/cli/cmd/encore/k8s" + _ "encr.dev/cli/cmd/encore/namespace" + _ "encr.dev/cli/cmd/encore/secrets" ) -var rootCmd = &cobra.Command{ - Use: "encore", - Short: "encore is the fastest way of developing backend applications", -} +// for backwards compatibility, for now +var rootCmd = root.Cmd func main() { log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) - if err := rootCmd.Execute(); err != nil { + if err := 
root.Cmd.Execute(); err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) } @@ -36,30 +37,11 @@ func main() { // relative path from the app root to the working directory. // On errors it prints an error message and exits. func determineAppRoot() (appRoot, relPath string) { - dir, err := os.Getwd() - if err != nil { - fatal(err) - } - rel := "." - for { - path := filepath.Join(dir, "encore.app") - fi, err := os.Stat(path) - if os.IsNotExist(err) { - dir2 := filepath.Dir(dir) - if dir2 == dir { - fatal("no encore.app found in directory (or any of the parent directories).") - } - rel = filepath.Join(filepath.Base(dir), rel) - dir = dir2 - continue - } else if err != nil { - fatal(err) - } else if fi.IsDir() { - fatal("encore.app is a directory, not a file") - } else { - return dir, rel - } - } + return cmdutil.AppRoot() +} + +func determineWorkspaceRoot(appRoot string) string { + return cmdutil.WorkspaceRoot(appRoot) } func resolvePackages(dir string, patterns ...string) ([]string, error) { @@ -78,69 +60,22 @@ func resolvePackages(dir string, patterns ...string) ([]string, error) { return paths, nil } -// commandOutputStream is the interface for gRPC streams that -// stream the output of a command. -type commandOutputStream interface { - Recv() (*daemonpb.CommandMessage, error) -} - -// streamCommandOutput streams the output from the given command stream, -// and exits with the same exit code as the command. 
-func streamCommandOutput(stream commandOutputStream) { - for { - msg, err := stream.Recv() - if err != nil { - st := status.Convert(err) - switch { - case st.Code() == codes.FailedPrecondition: - fmt.Fprintln(os.Stderr, st.Message()) - os.Exit(1) - case err == io.EOF || st.Code() == codes.Canceled: - return - default: - log.Fatal().Err(err).Msg("connection failure") - } - } - - switch m := msg.Msg.(type) { - case *daemonpb.CommandMessage_Output: - if m.Output.Stdout != nil { - os.Stdout.Write(m.Output.Stdout) - } - if m.Output.Stderr != nil { - os.Stderr.Write(m.Output.Stderr) - } - case *daemonpb.CommandMessage_Exit: - os.Exit(int(m.Exit.Code)) - } - } +func displayError(out *os.File, err []byte) { + cmdutil.DisplayError(out, err) } func fatal(args ...interface{}) { - // Prettify gRPC errors - for i, arg := range args { - if err, ok := arg.(error); ok { - if s, ok := status.FromError(err); ok { - args[i] = s.Message() - } - } - } - - red := color.New(color.FgRed) - red.Fprint(os.Stderr, "error: ") - red.Fprintln(os.Stderr, args...) - os.Exit(1) + cmdutil.Fatal(args...) } func fatalf(format string, args ...interface{}) { - // Prettify gRPC errors - for i, arg := range args { - if err, ok := arg.(error); ok { - if s, ok := status.FromError(err); ok { - args[i] = s.Message() - } - } - } + cmdutil.Fatalf(format, args...) 
+} - fatal(fmt.Sprintf(format, args...)) +func nonZeroPtr[T comparable](v T) *T { + var zero T + if v == zero { + return nil + } + return &v } diff --git a/cli/cmd/encore/mcp.go b/cli/cmd/encore/mcp.go new file mode 100644 index 0000000000..9ce2b5cf7f --- /dev/null +++ b/cli/cmd/encore/mcp.go @@ -0,0 +1,412 @@ +package main + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "maps" + "net/http" + "os" + "strings" + "sync" + "time" + + "github.com/logrusorgru/aurora/v3" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + "encr.dev/cli/internal/jsonrpc2" +) + +var mcpCmd = &cobra.Command{ + Use: "mcp", + Short: "MCP (Message Context Provider) commands", +} + +var ( + appID string + mcpPort int = 9900 +) + +var startCmd = &cobra.Command{ + Use: "start", + Short: "Starts an SSE based MCP session and prints the SSE URL", + Run: func(cmd *cobra.Command, args []string) { + ctx := cmd.Context() + if appID == "" { + appID = cmdutil.AppSlugOrLocalID() + } + setupDaemon(ctx) + + _, _ = fmt.Fprintf(os.Stderr, " MCP Service is running!\n\n") + _, _ = fmt.Fprintf(os.Stderr, " MCP SSE URL: %s\n", aurora.Cyan(fmt.Sprintf( + "http://localhost:%d/sse?app=%s", mcpPort, appID))) + _, _ = fmt.Fprintf(os.Stderr, " MCP stdio Command: %s\n", aurora.Cyan(fmt.Sprintf( + "encore mcp run --app=%s", appID))) + }, +} + +type sseConnection struct { + read func() (typ, data string, err error) + close func() error + + appID string + connected bool + path string + client *http.Client + + // Track outstanding request IDs + mu sync.Mutex + requestIDs map[jsonrpc2.ID]struct{} +} + +func (c *sseConnection) Read() (typ, data string, err error) { + typ, data, err = c.read() + if err != nil { + c.connected = false + return "", "", err + } + return typ, data, nil +} + +func (c *sseConnection) Close() error { + if c.close != nil { + c.connected = false + return c.close() + } + return nil +} + +func (c *sseConnection) reconnect(ctx 
context.Context) error { + // Close the existing connection if there is one + if c.close != nil { + _ = c.close() + } + c.connected = false + + // Initial backoff duration + backoff := 1000 * time.Millisecond + maxBackoff := 10 * time.Second + + for { + // Check if context is canceled + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + if root.Verbosity > 0 { + fmt.Fprintf(os.Stderr, "Reconnecting to MCP: %v\n", backoff) + } + + // Try to connect + err := c.connect(ctx) + if err == nil { + c.connected = true + return nil + } + + // If connection failed, wait and retry with exponential backoff + if root.Verbosity > 0 { + fmt.Fprintf(os.Stderr, "Failed to connect to MCP: %v, retrying in %v\n", err, backoff) + } + + select { + case <-time.After(backoff): + // Double the backoff for next attempt, but cap at maxBackoff + backoff *= 2 + if backoff > maxBackoff { + backoff = maxBackoff + } + case <-ctx.Done(): + return ctx.Err() + } + } +} + +func (c *sseConnection) connect(ctx context.Context) error { + setupDaemon(ctx) + if c.client == nil { + c.client = &http.Client{} + } + + // Initialize the request IDs map + c.mu.Lock() + c.requestIDs = make(map[jsonrpc2.ID]struct{}) + c.mu.Unlock() + + resp, err := c.client.Get(fmt.Sprintf("http://localhost:%d/sse?app=%s", mcpPort, c.appID)) + if err != nil { + return err + } + if resp.StatusCode != 200 { + resp.Body.Close() + return fmt.Errorf("error getting session ID: %v", resp.Status) + } + c.read = eventReader(startLineReader(ctx, bufio.NewReader(resp.Body).ReadString)) + c.close = resp.Body.Close + c.connected = true + + // Read the endpoint path + event, path, err := c.Read() + if err != nil { + return fmt.Errorf("error reading endpoint path: %v", err) + } + if event != "endpoint" { + return fmt.Errorf("expected endpoint event, got %q", event) + } + c.path = path + + return nil +} + +func (c *sseConnection) SendMessage(data []byte) error { + if !c.connected { + return fmt.Errorf("not connected to MCP") + } 
+ + if c.client == nil { + c.client = &http.Client{} + } + + // Track the request ID if it's a Call + msg, err := jsonrpc2.DecodeMessage(data) + if err == nil { + if call, ok := msg.(*jsonrpc2.Call); ok { + c.mu.Lock() + c.requestIDs[call.ID()] = struct{}{} + c.mu.Unlock() + } + } + + resp, err := c.client.Post(fmt.Sprintf("http://localhost:%d%s", mcpPort, c.path), "application/json", bytes.NewReader(data)) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != 202 { + return fmt.Errorf("error forwarding request: %v", resp.Status) + } + + return nil +} + +// CreateErrorResponse creates a JSON-RPC error response with the correct ID if available +func (c *sseConnection) CreateErrorResponse(id *jsonrpc2.ID, code int, message string) string { + // Build the error response + response := map[string]interface{}{ + "jsonrpc": "2.0", + "error": map[string]interface{}{ + "code": code, + "message": message, + }, + } + + // Include ID if available + if id != nil { + response["id"] = id + + // Remove from tracking as we're responding to it + c.mu.Lock() + delete(c.requestIDs, *id) + c.mu.Unlock() + } else { + response["id"] = nil + } + + // Marshal to JSON + jsonData, err := json.Marshal(response) + if err != nil { + // Fallback if marshaling fails + return fmt.Sprintf(`{"jsonrpc":"2.0","id":null,"error":{"code":%d,"message":"%s"}}`, code, message) + } + + return string(jsonData) +} + +// RemoveRequestID removes a request ID from tracking once a response is received +func (c *sseConnection) RemoveRequestID(id jsonrpc2.ID) { + c.mu.Lock() + delete(c.requestIDs, id) + c.mu.Unlock() +} + +var runCmd = &cobra.Command{ + Use: "run", + Short: "Runs an stdio-based MCP session", + Run: func(cmd *cobra.Command, args []string) { + + ctx := cmd.Context() + + if appID == "" { + appID = cmdutil.AppSlugOrLocalID() + } + + if root.Verbosity > 0 { + _, _ = fmt.Fprintf(os.Stderr, "Starting an MCP session for app %s\n", appID) + } + + conn := &sseConnection{appID: 
appID} + if err := conn.connect(ctx); err != nil { + fmt.Fprintf(os.Stderr, "Error connecting to MCP: %v\n", err) + os.Exit(1) + } + defer conn.Close() + + go func() { + for { + event, data, err := conn.Read() + if err != nil { + fmt.Fprintf(os.Stderr, "Error reading event: %v\n", err) + + conn.mu.Lock() + requestIDs := maps.Clone(conn.requestIDs) + conn.mu.Unlock() + for id := range requestIDs { + fmt.Println(conn.CreateErrorResponse(&id, -32700, "error")) + } + if err := conn.reconnect(ctx); err != nil { + fmt.Fprintf(os.Stderr, "Error reconnecting to MCP: %v\n", err) + os.Exit(1) + } + continue + } + if root.Verbosity > 0 { + fmt.Fprintf(os.Stderr, "Received event: %s: %s\n", event, data) + } + if event == "message" { + // If it's a response message, remove the ID from tracking + responseMsg := struct { + JSONRPC string `json:"jsonrpc"` + ID *jsonrpc2.ID `json:"id"` + Result interface{} `json:"result,omitempty"` + Error interface{} `json:"error,omitempty"` + }{} + + if err := json.Unmarshal([]byte(data), &responseMsg); err == nil && responseMsg.ID != nil { + conn.RemoveRequestID(*responseMsg.ID) + } + + fmt.Println(data) + } + } + }() + + stdinReader := startLineReader(ctx, bufio.NewReader(os.Stdin).ReadBytes) + if root.Verbosity > 0 { + _, _ = fmt.Fprintf(os.Stderr, "Listening on stdin for MCP requests\n\n") + } + + for { + line, err := stdinReader() + if err != nil { + if err == io.EOF || err == context.Canceled { + break + } + fmt.Fprintf(os.Stderr, "Error reading input: %v\n", err) + os.Exit(1) + } + if strings.TrimSpace(string(line)) == "" { + continue + } + + msg, err := jsonrpc2.DecodeMessage(line) + if err != nil { + fmt.Fprintf(os.Stderr, "Error decoding request: %v\n", err) + fmt.Println(conn.CreateErrorResponse(nil, -32700, "parse error")) + continue + } + + if root.Verbosity > 0 { + fmt.Fprintf(os.Stderr, "Sending request: %s\n", line) + } + + err = conn.SendMessage(line) + if err != nil { + fmt.Fprintf(os.Stderr, "Error sending message: %v\n", err) 
+ + // Create error response with the request ID if available + var requestID *jsonrpc2.ID + if call, ok := msg.(*jsonrpc2.Call); ok { + id := call.ID() + requestID = &id + } + + fmt.Println(conn.CreateErrorResponse(requestID, -32700, "error sending message")) + continue + } + } + }, +} + +type lineResult[T any] struct { + res T + err error +} + +func startLineReader[T any](ctx context.Context, rd func(byte) (T, error)) func() (T, error) { + channel := make(chan lineResult[T]) + go func() { + for { + line, err := rd('\n') // wait for Enter key + if err != nil { + channel <- lineResult[T]{err: err} + return + } + channel <- lineResult[T]{res: line} + } + }() + return func() (T, error) { + var t T + select { + case <-ctx.Done(): + return t, ctx.Err() + case result := <-channel: + if result.err != nil { + return t, result.err + } + return result.res, nil + } + } +} + +func eventReader(reader func() (string, error)) func() (typ, data string, err error) { + return func() (typ, data string, err error) { + var line string + for { + line, err = reader() + if err != nil { + return "", "", err + } + if strings.HasPrefix(line, "event:") { + break + } + } + typ = strings.TrimSpace(strings.TrimPrefix(line, "event:")) + line, err = reader() + if err != nil { + return "", "", err + } + if !strings.HasPrefix(line, "data:") { + return "", "", fmt.Errorf("expected data: prefix, got %q", line) + } + data = strings.TrimSpace(strings.TrimPrefix(line, "data:")) + return typ, data, nil + } +} + +func init() { + mcpCmd.AddCommand(runCmd) + runCmd.Flags().StringVar(&appID, "app", "", "The app ID to use for the MCP session") + + mcpCmd.AddCommand(startCmd) + startCmd.Flags().StringVar(&appID, "app", "", "The app ID to use for the MCP session") + + root.Cmd.AddCommand(mcpCmd) +} diff --git a/cli/cmd/encore/namespace/namespace.go b/cli/cmd/encore/namespace/namespace.go new file mode 100644 index 0000000000..61af5a7f46 --- /dev/null +++ b/cli/cmd/encore/namespace/namespace.go @@ -0,0 +1,210 @@ 
+package namespace + +import ( + "bytes" + "cmp" + "context" + "encoding/json" + "fmt" + "os" + "slices" + "text/tabwriter" + "time" + + "github.com/spf13/cobra" + "google.golang.org/protobuf/encoding/protojson" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + daemonpb "encr.dev/proto/encore/daemon" +) + +var nsCmd = &cobra.Command{ + Use: "namespace", + Short: "Manage infrastructure namespaces", + Aliases: []string{"ns"}, +} + +func init() { + output := cmdutil.Oneof{Value: "columns", Allowed: []string{"columns", "json"}} + listCmd := &cobra.Command{ + Use: "list", + Short: "List infrastructure namespaces", + Aliases: []string{"ls"}, + Args: cobra.NoArgs, + Run: func(cmd *cobra.Command, args []string) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + appRoot, _ := cmdutil.AppRoot() + daemon := cmdutil.ConnectDaemon(ctx) + resp, err := daemon.ListNamespaces(ctx, &daemonpb.ListNamespacesRequest{AppRoot: appRoot}) + if err != nil { + cmdutil.Fatal(err) + } + nss := resp.Namespaces + + // Sort by active first, then name second. 
+ slices.SortFunc(nss, func(a, b *daemonpb.Namespace) int { + if a.Active != b.Active { + if a.Active { + return -1 + } else { + return 1 + } + } + return cmp.Compare(a.Name, b.Name) + }) + + if output.Value == "json" { + var buf bytes.Buffer + buf.WriteByte('[') + for i, ns := range nss { + data, err := protojson.MarshalOptions{ + UseProtoNames: true, + EmitUnpopulated: true, + }.Marshal(ns) + if err != nil { + cmdutil.Fatal(err) + } + if i > 0 { + buf.WriteByte(',') + } + buf.Write(data) + } + buf.WriteByte(']') + + var dst bytes.Buffer + if err := json.Indent(&dst, buf.Bytes(), "", " "); err != nil { + cmdutil.Fatal(err) + } + _, _ = fmt.Fprintln(os.Stdout, dst.String()) + return + } + + w := tabwriter.NewWriter(os.Stdout, 0, 0, 3, ' ', tabwriter.StripEscape) + + _, _ = fmt.Fprint(w, "NAME\tID\tACTIVE\n") + + for _, ns := range nss { + active := "" + if ns.Active { + active = "yes" + } + _, _ = fmt.Fprintf(w, "%s\t%s\t%s\n", ns.Name, ns.Id, active) + } + _ = w.Flush() + }, + } + output.AddFlag(listCmd) + + nsCmd.AddCommand(listCmd) +} + +var createCmd = &cobra.Command{ + Use: "create NAME", + Short: "Create a new infrastructure namespace", + + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + appRoot, _ := cmdutil.AppRoot() + daemon := cmdutil.ConnectDaemon(ctx) + ns, err := daemon.CreateNamespace(ctx, &daemonpb.CreateNamespaceRequest{ + AppRoot: appRoot, + Name: args[0], + }) + if err != nil { + cmdutil.Fatal(err) + } + _, _ = fmt.Fprintf(os.Stdout, "created namespace %s\n", ns.Name) + }, +} + +var deleteCmd = &cobra.Command{ + Use: "delete NAME", + Short: "Delete an infrastructure namespace", + Aliases: []string{"del"}, + + Args: cobra.ExactArgs(1), + ValidArgsFunction: namespaceListCompletion, + Run: func(cmd *cobra.Command, args []string) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + 
appRoot, _ := cmdutil.AppRoot() + daemon := cmdutil.ConnectDaemon(ctx) + name := args[0] + _, err := daemon.DeleteNamespace(ctx, &daemonpb.DeleteNamespaceRequest{ + AppRoot: appRoot, + Name: name, + }) + if err != nil { + cmdutil.Fatal(err) + } + _, _ = fmt.Fprintf(os.Stdout, "deleted namespace %s\n", name) + }, +} + +func init() { + var create bool + switchCmd := &cobra.Command{ + Use: "switch [--create] NAME", + Short: "Switch to a different infrastructure namespace", + Long: `Switch to a specified infrastructure namespace. Subsequent commands will use the given namespace by default. + +If -c is specified, the namespace will first be created before switching to it. + +You can use '-' as the namespace name to switch back to the previously active namespace. +`, + + Args: cobra.ExactArgs(1), + ValidArgsFunction: namespaceListCompletion, + Run: func(cmd *cobra.Command, args []string) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + appRoot, _ := cmdutil.AppRoot() + daemon := cmdutil.ConnectDaemon(ctx) + ns, err := daemon.SwitchNamespace(ctx, &daemonpb.SwitchNamespaceRequest{ + AppRoot: appRoot, + Name: args[0], + Create: create, + }) + if err != nil { + cmdutil.Fatal(err) + } + _, _ = fmt.Fprintf(os.Stdout, "switched to namespace %s\n", ns.Name) + }, + } + + switchCmd.Flags().BoolVarP(&create, "create", "c", false, "create the namespace before switching") + nsCmd.AddCommand(switchCmd) +} + +func init() { + nsCmd.AddCommand(createCmd) + nsCmd.AddCommand(deleteCmd) + root.Cmd.AddCommand(nsCmd) +} + +func namespaceListCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // List namespaces from the daemon for completion. 
+ ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + appRoot, _ := cmdutil.AppRoot() + daemon := cmdutil.ConnectDaemon(ctx) + resp, err := daemon.ListNamespaces(ctx, &daemonpb.ListNamespacesRequest{AppRoot: appRoot}) + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + namespaces := make([]string, len(resp.Namespaces)) + for i, ns := range resp.Namespaces { + namespaces[i] = ns.Name + } + return namespaces, cobra.ShellCompDirectiveNoFileComp +} diff --git a/cli/cmd/encore/rand.go b/cli/cmd/encore/rand.go new file mode 100644 index 0000000000..ce8dd29b93 --- /dev/null +++ b/cli/cmd/encore/rand.go @@ -0,0 +1,186 @@ +package main + +import ( + cryptorand "crypto/rand" + "encoding/base32" + "encoding/base64" + "encoding/hex" + "fmt" + "os" + "strconv" + "strings" + + "github.com/gofrs/uuid" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/pkg/words" +) + +var randCmd = &cobra.Command{ + Use: "rand", + Short: "Utilities for generating cryptographically secure random data", +} + +func init() { + rootCmd.AddCommand(randCmd) +} + +// UUID command +func init() { + var v1, v4, v6, v7 bool + uuidCmd := &cobra.Command{ + Use: "uuid [-1|-4|-6|-7]", + Short: "Generates a random UUID (defaults to version 4)", + Args: cobra.NoArgs, + Run: func(cmd *cobra.Command, args []string) { + versions := map[bool]func() (uuid.UUID, error){ + v1: uuid.NewV1, + v4: uuid.NewV4, + v6: uuid.NewV6, + v7: uuid.NewV7, + } + fn, ok := versions[true] + if !ok { + fatalf("unsupported UUID version") + } + u, err := fn() + if err != nil { + fatalf("failed to generate UUID: %v", err) + } + _, _ = fmt.Println(u.String()) + }, + } + uuidCmd.Flags().BoolVarP(&v1, "v1", "1", false, "Generate a version 1 UUID") + uuidCmd.Flags().BoolVarP(&v4, "v4", "4", true, "Generate a version 4 UUID") + uuidCmd.Flags().BoolVarP(&v6, "v6", "6", false, "Generate a version 6 UUID") + uuidCmd.Flags().BoolVarP(&v7, "v7", "7", false, "Generate 
a version 7 UUID") + uuidCmd.MarkFlagsMutuallyExclusive("v1", "v4", "v6", "v7") + + randCmd.AddCommand(uuidCmd) +} + +// Bytes command +func init() { + format := cmdutil.Oneof{ + Value: "hex", + Allowed: []string{"hex", "base32", "base32hex", "base32crockford", "base64", "base64url", "raw"}, + Flag: "format", + FlagShort: "f", + Desc: "Output format", + } + + noPadding := false + doFormat := func(data []byte) string { + switch format.Value { + case "hex": + return hex.EncodeToString(data) + case "base32": + enc := base32.StdEncoding + if noPadding { + enc = enc.WithPadding(base32.NoPadding) + } + return enc.EncodeToString(data) + case "base32hex": + enc := base32.HexEncoding + if noPadding { + enc = enc.WithPadding(base32.NoPadding) + } + return enc.EncodeToString(data) + case "base32crockford": + enc := base32.NewEncoding("0123456789ABCDEFGHJKMNPQRSTVWXYZ") + if noPadding { + enc = enc.WithPadding(base32.NoPadding) + } + return enc.EncodeToString(data) + case "base64": + enc := base64.StdEncoding + if noPadding { + enc = enc.WithPadding(base64.NoPadding) + } + return enc.EncodeToString(data) + case "base64url": + enc := base64.URLEncoding + if noPadding { + enc = enc.WithPadding(base64.NoPadding) + } + return enc.EncodeToString(data) + default: + fatalf("unsupported output format: %s", format.Value) + panic("unreachable") + } + } + + bytesCmd := &cobra.Command{ + Use: "bytes BYTES [-f " + format.Alternatives() + "]", + Short: "Generates random bytes and outputs them in the specified format", + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + num, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + fatalf("invalid number of bytes: %v", err) + } else if num < 1 { + fatalf("number of bytes must be positive") + } else if num > 1024*1024 { + fatalf("too many bytes requested") + } + + data := make([]byte, num) + _, err = cryptorand.Read(data) + if err != nil { + fatalf("failed to generate random bytes: %v", err) + } + + if 
format.Value == "raw" { + _, err = os.Stdout.Write(data) + if err != nil { + fatalf("failed to write: %v", err) + } + } else { + formatted := doFormat(data) + if _, err := os.Stdout.WriteString(formatted); err != nil { + fatalf("failed to write: %v", err) + } + _, _ = os.Stdout.Write([]byte{'\n'}) + } + }, + } + + format.AddFlag(bytesCmd) + bytesCmd.Flags().BoolVar(&noPadding, "no-padding", false, "omit padding characters from base32/base64 output") + randCmd.AddCommand(bytesCmd) +} + +// Words command +func init() { + var sep string + wordsCmd := &cobra.Command{ + Use: "words [--sep=SEPARATOR] NUM", + Short: "Generates random 4-5 letter words for memorable passphrases", + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + num, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + fatalf("invalid number of words: %v", err) + } else if num < 1 { + fatalf("number of words must be positive") + } else if num > 1024 { + fatalf("too many words requested") + } + + selected, err := words.Select(int(num)) + if err != nil { + fatalf("failed to select words: %v", err) + } + + formatted := strings.Join(selected, sep) + if _, err := os.Stdout.WriteString(formatted); err != nil { + fatalf("failed to write: %v", err) + } + _, _ = os.Stdout.Write([]byte{'\n'}) + }, + } + + wordsCmd.Flags().StringVarP(&sep, "sep", "s", " ", "separator between words") + randCmd.AddCommand(wordsCmd) +} diff --git a/cli/cmd/encore/root/rootcmd.go b/cli/cmd/encore/root/rootcmd.go new file mode 100644 index 0000000000..fc1fd99af7 --- /dev/null +++ b/cli/cmd/encore/root/rootcmd.go @@ -0,0 +1,60 @@ +package root + +import ( + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "github.com/spf13/cobra" + + "encr.dev/pkg/errlist" +) + +var ( + Verbosity int + traceFile string + + // TraceFile is the file to write trace logs to. + // If nil (the default), trace logs are not written. 
+ TraceFile *string +) + +var preRuns []func(cmd *cobra.Command, args []string) + +// AddPreRun adds a function to be executed before the command runs. +func AddPreRun(f func(cmd *cobra.Command, args []string)) { + preRuns = append(preRuns, f) +} + +var Cmd = &cobra.Command{ + Use: "encore", + Short: "encore is the fastest way of developing backend applications", + SilenceErrors: true, // We'll handle displaying an error in our main func + CompletionOptions: cobra.CompletionOptions{ + HiddenDefaultCmd: true, // Hide the "completion" command from help (used for generating auto-completions for the shell) + }, + PersistentPreRun: func(cmd *cobra.Command, args []string) { + if traceFile != "" { + TraceFile = &traceFile + } + + level := zerolog.InfoLevel + if Verbosity == 1 { + level = zerolog.DebugLevel + } else if Verbosity >= 2 { + level = zerolog.TraceLevel + } + + if Verbosity >= 1 { + errlist.Verbose = true + } + log.Logger = log.Logger.Level(level) + + for _, f := range preRuns { + f(cmd, args) + } + }, +} + +func init() { + Cmd.PersistentFlags().CountVarP(&Verbosity, "verbose", "v", "verbose output") + Cmd.PersistentFlags().StringVar(&traceFile, "trace", "", "file to write execution trace data to") +} diff --git a/cli/cmd/encore/run.go b/cli/cmd/encore/run.go index 8c808fcb91..57656a21e6 100644 --- a/cli/cmd/encore/run.go +++ b/cli/cmd/encore/run.go @@ -3,88 +3,150 @@ package main import ( "context" "fmt" - "io" + "net" "os" "os/signal" + "strconv" - daemonpb "encr.dev/proto/encore/daemon" + "github.com/logrusorgru/aurora/v3" "github.com/spf13/cobra" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" + "golang.org/x/term" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/cmd/encore/root" + "encr.dev/cli/internal/onboarding" + daemonpb "encr.dev/proto/encore/daemon" ) var ( - tunnel bool - debug bool - watch bool + color bool + noColor bool // for "--no-color" compatibility + debug = cmdutil.Oneof{ + Value: "", + NoOptDefVal: "enabled", + 
// init registers the "run" command and its flags on the root command.
func init() {
	runCmd := &cobra.Command{
		Use:   "run [--debug] [--watch=true] [--port=4000] [--listen=]",
		Short: "Runs your application",
		Args:  cobra.NoArgs,
		Run: func(cmd *cobra.Command, args []string) {
			appRoot, wd := determineAppRoot()
			// If the user didn't explicitly set --watch and we're in debug mode, disable watching
			// as we typically don't want to swap the process when the user is debugging.
			if !cmd.Flag("watch").Changed && debug.Value != "" {
				watch = false
			}
			runApp(appRoot, wd)
		},
	}

	// Default --color to whether stdout is a terminal.
	isTerm := term.IsTerminal(int(os.Stdout.Fd()))

	rootCmd.AddCommand(runCmd)
	runCmd.Flags().BoolVarP(&watch, "watch", "w", true, "Watch for changes and live-reload")
	runCmd.Flags().StringVar(&listen, "listen", "", "Address to listen on (for example \"0.0.0.0:4000\")")
	runCmd.Flags().UintVarP(&port, "port", "p", 4000, "Port to listen on")
	runCmd.Flags().BoolVar(&jsonLogs, "json", false, "Display logs in JSON format")
	runCmd.Flags().StringVarP(&nsName, "namespace", "n", "", "Namespace to use (defaults to active namespace)")
	runCmd.Flags().BoolVar(&color, "color", isTerm, "Whether to display colorized output")
	// --no-color is a hidden compatibility alias for --color=false.
	runCmd.Flags().BoolVar(&noColor, "no-color", false, "Equivalent to --color=false")
	runCmd.Flags().MarkHidden("no-color")
	// debug and browser are cmdutil.Oneof flags; AddFlag wires their validation.
	debug.AddFlag(runCmd)
	browser.AddFlag(runCmd)
}
// runApp runs the app.
// It resolves the listen address from --listen/--port, translates the
// browser/debug flag values to daemon enums, starts the run via the daemon,
// and streams its output until the run exits. Never returns: exits the
// process with the run's exit code.
func runApp(appRoot, wd string) {
	// Cancel the run when the user interrupts (Ctrl+C).
	ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
	defer cancel()

	// Determine listen addr.
	var listenAddr string

	if listen == "" {
		// If we have no listen address at all, listen on localhost.
		// (we do this so MacOS's firewall doesn't ask for permission for the daemon to listen on all interfaces)
		listenAddr = fmt.Sprintf("127.0.0.1:%d", port)
	} else if _, _, err := net.SplitHostPort(listen); err == nil {
		// If --listen is given with a port, use that directly and ignore --port.
		listenAddr = listen
	} else {
		// Otherwise use --listen as the host and --port as the port.
		listenAddr = net.JoinHostPort(listen, strconv.Itoa(int(port)))
	}

	// Map the --browser oneof value to the protobuf enum.
	browserMode := daemonpb.RunRequest_BROWSER_AUTO
	switch browser.Value {
	case "auto":
		browserMode = daemonpb.RunRequest_BROWSER_AUTO
	case "never":
		browserMode = daemonpb.RunRequest_BROWSER_NEVER
	case "always":
		browserMode = daemonpb.RunRequest_BROWSER_ALWAYS
	}

	// Map the --debug oneof value ("" means disabled) to the protobuf enum.
	debugMode := daemonpb.RunRequest_DEBUG_DISABLED
	switch debug.Value {
	case "enabled":
		debugMode = daemonpb.RunRequest_DEBUG_ENABLED
	case "break":
		debugMode = daemonpb.RunRequest_DEBUG_BREAK
	}

	daemon := setupDaemon(ctx)
	stream, err := daemon.Run(ctx, &daemonpb.RunRequest{
		AppRoot:    appRoot,
		DebugMode:  debugMode,
		Watch:      watch,
		WorkingDir: wd,
		ListenAddr: listenAddr,
		Environ:    os.Environ(),
		TraceFile:  root.TraceFile,
		Namespace:  nonZeroPtr(nsName),
		Browser:    browserMode,
	})
	if err != nil {
		fatal(err)
	}

	cmdutil.ClearTerminalExceptFirstNLines(1)

	// Unless raw JSON logs were requested, pretty-print the daemon's
	// JSON log stream (colorized when --color allows it).
	var converter cmdutil.OutputConverter
	if !jsonLogs {
		converter = cmdutil.ConvertJSONLogs(cmdutil.Colorize(color && !noColor))
	}
	code := cmdutil.StreamCommandOutput(stream, converter)
	if code == 0 {
		// On success, show the one-time deploy hint if onboarding state
		// says it hasn't been shown yet (best-effort; errors are ignored).
		if state, err := onboarding.Load(); err == nil {
			if state.DeployHint.Set() {
				if err := state.Write(); err == nil {
					_, _ = fmt.Println(aurora.Sprintf("\nHint: deploy your app to the cloud by running: %s", aurora.Cyan("git push encore")))
				}
			}
		}
	}
	os.Exit(code)
}
- `, - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - if !secretDevFlag && !secretProdFlag { - fatal("must specify either --dev or --prod.") - } else if secretDevFlag && secretProdFlag { - fatal("cannot specify both --dev and --prod.") - } - - appRoot, _ := determineAppRoot() - - key := args[0] - var value string - fd := syscall.Stdin - if terminal.IsTerminal(int(fd)) { - fmt.Fprint(os.Stderr, "Enter secret value: ") - data, err := terminal.ReadPassword(int(fd)) - if err != nil { - fatal(err) - } - value = string(data) - fmt.Fprintln(os.Stderr) - } else { - data, err := ioutil.ReadAll(os.Stdin) - if err != nil { - fatal(err) - } - value = string(bytes.TrimRight(data, "\r\n")) - } - - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - defer cancel() - - daemon := setupDaemon(ctx) - typName := "development" - typ := daemonpb.SetSecretRequest_DEVELOPMENT - if secretProdFlag { - typName = "production" - typ = daemonpb.SetSecretRequest_PRODUCTION - } - - resp, err := daemon.SetSecret(ctx, &daemonpb.SetSecretRequest{ - AppRoot: appRoot, - Key: key, - Value: value, - Type: typ, - }) - if err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - } - if resp.Created { - fmt.Fprintf(os.Stderr, "Successfully created %s secret %s!\n", typName, key) - } else { - fmt.Fprintf(os.Stderr, "Successfully updated %s secret %s.\n", typName, key) - } - }, -} - -func init() { - rootCmd.AddCommand(secretCmd) - secretCmd.AddCommand(setSecretCmd) - setSecretCmd.Flags().BoolVarP(&secretDevFlag, "dev", "d", false, "To set the secret for development use") - setSecretCmd.Flags().BoolVarP(&secretProdFlag, "prod", "p", false, "To set the secret for production use") -} diff --git a/cli/cmd/encore/secrets/archive.go b/cli/cmd/encore/secrets/archive.go new file mode 100644 index 0000000000..13ceb5730b --- /dev/null +++ b/cli/cmd/encore/secrets/archive.go @@ -0,0 +1,60 @@ +package secrets + +import ( + "context" + "fmt" + "strings" + "time" + + 
"github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/platform" +) + +var archiveSecretCmd = &cobra.Command{ + Use: "archive ", + Short: "Archives a secret value", + DisableFlagsInUseLine: true, + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + doArchiveOrUnarchive(args[0], true) + }, +} + +var unarchiveSecretCmd = &cobra.Command{ + Use: "unarchive ", + Short: "Unarchives a secret value", + DisableFlagsInUseLine: true, + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + doArchiveOrUnarchive(args[0], false) + }, +} + +func doArchiveOrUnarchive(groupID string, archive bool) { + if !strings.HasPrefix(groupID, "secgrp") { + cmdutil.Fatal("the id must begin with 'secgrp_'. Valid ids can be found with 'encore secret list '.") + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + err := platform.UpdateSecretGroup(ctx, platform.UpdateSecretGroupParams{ + ID: groupID, + Archived: &archive, + }) + if err != nil { + cmdutil.Fatal(err) + } + if archive { + fmt.Printf("Successfully archived secret group %s.\n", groupID) + } else { + fmt.Printf("Successfully unarchived secret group %s.\n", groupID) + } +} + +func init() { + secretCmd.AddCommand(archiveSecretCmd) + secretCmd.AddCommand(unarchiveSecretCmd) +} diff --git a/cli/cmd/encore/secrets/check.go b/cli/cmd/encore/secrets/check.go new file mode 100644 index 0000000000..77e395b442 --- /dev/null +++ b/cli/cmd/encore/secrets/check.go @@ -0,0 +1,221 @@ +package secrets + +import ( + "bytes" + "context" + "fmt" + "os" + "slices" + "strings" + "text/tabwriter" + "time" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + "encr.dev/cli/internal/platform" + "encr.dev/cli/internal/platform/gql" +) + +var checkSecretCmd = &cobra.Command{ + Use: "check [envs...]", + Short: "Check if secrets are properly set across environments", + Long: `Check if secrets are 
// normalizeEnvNames converts user-supplied environment arguments to their
// canonical names (e.g. "prod" -> "production", "dev" -> "development").
// Matching is case-insensitive; anything that isn't a known environment
// type is passed through unchanged, assumed to be a specific environment name.
func normalizeEnvNames(args []string) []string {
	canonical := map[string]string{
		"prod":        "production",
		"production":  "production",
		"dev":         "development",
		"development": "development",
		"local":       "local",
		"preview":     "preview",
	}
	var normalized []string
	for _, arg := range args {
		if name, ok := canonical[strings.ToLower(arg)]; ok {
			normalized = append(normalized, name)
		} else {
			// Assume it's a specific environment name; keep the original casing.
			normalized = append(normalized, arg)
		}
	}
	return normalized
}
// CheckSecretsAcrossEnvs reports, for every secret, whether each of the
// given environments has a non-archived value configured.
// envNames are matched against both env-type kinds (e.g. "production") and
// specific environment names; archived groups are ignored.
// HasMissing is set if any secret lacks a value in any requested environment.
func CheckSecretsAcrossEnvs(secrets []*gql.Secret, envNames []string) CheckResult {
	var result CheckResult

	for _, secret := range secrets {
		secretResult := SecretCheckResult{
			Key:       secret.Key,
			EnvStatus: make(map[string]bool),
		}

		// Initialize all environments as missing
		for _, envName := range envNames {
			secretResult.EnvStatus[envName] = false
		}

		// Check which environments have this secret
		for _, group := range secret.Groups {
			if group.ArchivedAt != nil {
				continue // Skip archived secrets
			}

			for _, selector := range group.Selector {
				switch sel := selector.(type) {
				case *gql.SecretSelectorEnvType:
					// NOTE(review): env-type kinds and specific env names share
					// the same envNames namespace here — a specific environment
					// literally named "production" would also match the type
					// selector. Confirm this is intended.
					if contains(envNames, sel.Kind) {
						secretResult.EnvStatus[sel.Kind] = true
					}
				case *gql.SecretSelectorSpecificEnv:
					if contains(envNames, sel.Env.Name) {
						secretResult.EnvStatus[sel.Env.Name] = true
					}
				}
			}
		}

		// Check if any environment is missing this secret
		for _, hasSecret := range secretResult.EnvStatus {
			if !hasSecret {
				result.HasMissing = true
				break
			}
		}

		result.SecretResults = append(result.SecretResults, secretResult)
	}

	return result
}
// contains reports whether slice contains item.
// Delegates to the standard library's slices.Contains (the file already
// imports "slices" for sorting) instead of a hand-rolled loop.
func contains(slice []string, item string) bool {
	return slices.Contains(slice, item)
}
// listSecretCmd renders either an overview of all secrets (no args) or a
// detailed per-group listing for the given keys.
var listSecretCmd = &cobra.Command{
	Use:                   "list [keys...]",
	Short:                 "Lists secrets, optionally for a specific key",
	DisableFlagsInUseLine: true,
	Run: func(cmd *cobra.Command, args []string) {
		appSlug := cmdutil.AppSlug()
		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()

		// nil keys means "list everything" and selects the overview rendering below.
		var keys []string
		if len(args) > 0 {
			keys = args
		}
		secrets, err := platform.ListSecretGroups(ctx, appSlug, keys)
		if err != nil {
			cmdutil.Fatal(err)
		}

		if keys == nil {
			// Print secrets overview: one row per secret key with a
			// check/cross per environment class. Rendered into a buffer
			// first so colors can be added after alignment (see below).
			var buf bytes.Buffer
			w := tabwriter.NewWriter(&buf, 0, 0, 3, ' ', tabwriter.StripEscape)

			_, _ = fmt.Fprint(w, "Secret Key\tProduction\tDevelopment\tLocal\tPreview\tSpecific Envs\t\n")
			const (
				checkYes = "\u2713"
				checkNo  = "\u2717"
			)
			for _, s := range secrets {
				render := func(b bool) string {
					if b {
						return checkYes
					} else {
						return checkNo
					}
				}
				d := getSecretEnvDesc(s.Groups)
				// Secrets with only archived groups are omitted entirely.
				if !d.hasAny {
					continue
				}
				_, _ = fmt.Fprintf(w, "%s\t%v\t%v\t%v\t%v\t", s.Key,
					render(d.prod), render(d.dev), render(d.local), render(d.preview))
				// Render specific envs, if any
				for i, env := range d.specific {
					if i > 0 {
						_, _ = fmt.Fprintf(w, ",")
					}
					_, _ = fmt.Fprintf(w, "%s", env.Name)
				}

				_, _ = fmt.Fprint(w, "\t\n")
			}
			_ = w.Flush()

			// Add color to the checkmarks now that the table is correctly laid out.
			// We can't do it before since the tabwriter will get the alignment wrong
			// if we include a bunch of ANSI escape codes that it doesn't understand.
			r := strings.NewReplacer(checkYes, color.GreenString(checkYes), checkNo, color.RedString(checkNo))
			_, _ = r.WriteString(os.Stdout, buf.String())
		} else {
			// Specific secrets: one row per secret group, showing its id,
			// key, and selector; archived groups are dimmed and sorted last.
			w := tabwriter.NewWriter(os.Stdout, 0, 0, 3, ' ', 0)
			_, _ = fmt.Fprint(w, "ID\tSecret Key\tEnvironment(s)\t\n")

			slices.SortFunc(secrets, func(a, b *gql.Secret) int {
				return cmp.Compare(a.Key, b.Key)
			})
			for _, s := range secrets {
				// Sort the archived groups to the end
				slices.SortFunc(s.Groups, func(a, b *gql.SecretGroup) int {
					aa, ab := a.ArchivedAt != nil, b.ArchivedAt != nil
					if aa != ab {
						if aa {
							return 1
						} else {
							return -1
						}
					} else if aa {
						return a.ArchivedAt.Compare(*b.ArchivedAt)
					} else {
						return cmp.Compare(a.ID, b.ID)
					}
				})

				for _, g := range s.Groups {
					var sel []string
					for _, s := range g.Selector {
						switch s := s.(type) {
						case *gql.SecretSelectorSpecificEnv:
							// If we have a specific environment, render the name
							// instead of the id (which is the default when using s.String()).
							sel = append(sel, "env:"+s.Env.Name)
						default:
							sel = append(sel, s.String())
						}
					}

					s := fmt.Sprintf("%s\t%s\t%s\t", g.ID, s.Key, strings.Join(sel, ", "))
					if g.ArchivedAt != nil {
						s += "(archived)\t"
						_, _ = color.New(color.Concealed).Fprintln(w, s)
					} else {
						_, _ = fmt.Fprintln(w, s)
					}
				}
			}
			_ = w.Flush()
		}
	},
}
+ sel = append(sel, "env:"+s.Env.Name) + default: + sel = append(sel, s.String()) + } + } + + s := fmt.Sprintf("%s\t%s\t%s\t", g.ID, s.Key, strings.Join(sel, ", ")) + if g.ArchivedAt != nil { + s += "(archived)\t" + _, _ = color.New(color.Concealed).Fprintln(w, s) + } else { + _, _ = fmt.Fprintln(w, s) + } + } + } + _ = w.Flush() + } + }, +} + +func init() { + secretCmd.AddCommand(listSecretCmd) +} + +type secretEnvDesc struct { + hasAny bool // if there are any non-archived groups at all + prod, dev, local, preview bool + specific []*gql.Env +} + +func getSecretEnvDesc(groups []*gql.SecretGroup) secretEnvDesc { + var desc secretEnvDesc + for _, g := range groups { + if g.ArchivedAt != nil { + continue + } + desc.hasAny = true + for _, sel := range g.Selector { + switch sel := sel.(type) { + case *gql.SecretSelectorEnvType: + switch sel.Kind { + case "production": + desc.prod = true + case "development": + desc.dev = true + case "local": + desc.local = true + case "preview": + desc.preview = true + } + case *gql.SecretSelectorSpecificEnv: + desc.specific = append(desc.specific, sel.Env) + } + } + } + return desc +} diff --git a/cli/cmd/encore/secrets/secrets.go b/cli/cmd/encore/secrets/secrets.go new file mode 100644 index 0000000000..6a057bb61e --- /dev/null +++ b/cli/cmd/encore/secrets/secrets.go @@ -0,0 +1,17 @@ +package secrets + +import ( + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/root" +) + +var secretCmd = &cobra.Command{ + Use: "secret", + Short: "Secret management commands", + Aliases: []string{"secrets"}, +} + +func init() { + root.Cmd.AddCommand(secretCmd) +} diff --git a/cli/cmd/encore/secrets/set.go b/cli/cmd/encore/secrets/set.go new file mode 100644 index 0000000000..d07fd3369d --- /dev/null +++ b/cli/cmd/encore/secrets/set.go @@ -0,0 +1,272 @@ +package secrets + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "os" + "slices" + "sort" + "strings" + "syscall" + "time" + + "github.com/cockroachdb/errors" + 
// init registers "secret set" and its flags.
func init() {
	secretCmd.AddCommand(setSecretCmd)
	// --dev/--prod are legacy shorthands kept for compatibility; they are
	// hidden from help in favor of the more general --type/--env flags.
	setSecretCmd.Flags().BoolVarP(&secretEnvs.devFlag, "dev", "d", false, "To set the secret for development use")
	setSecretCmd.Flags().BoolVarP(&secretEnvs.prodFlag, "prod", "p", false, "To set the secret for production use")
	setSecretCmd.Flags().StringSliceVarP(&secretEnvs.envTypes, "type", "t", nil, "environment type(s) to set for (comma-separated list)")
	setSecretCmd.Flags().StringSliceVarP(&secretEnvs.envNames, "env", "e", nil, "environment name(s) to set for (comma-separated list)")
	_ = setSecretCmd.Flags().MarkHidden("dev")
	_ = setSecretCmd.Flags().MarkHidden("prod")
}
cmdutil.AppSlug() + sel := secretEnvs.ParseSelector(ctx, appSlug) + + app, err := platform.GetApp(ctx, appSlug) + if err != nil { + cmdutil.Fatalf("unable to lookup app %s: %v", appSlug, err) + } + + // Does a matching secret group already exist? + secrets, err := platform.ListSecretGroups(ctx, app.Slug, []string{key}) + if err != nil { + cmdutil.Fatalf("unable to list secrets: %v", err) + } + + if matching := findMatchingSecretGroup(secrets, key, sel); matching != nil { + // We found a matching secret group. Update it. + err := platform.CreateSecretVersion(ctx, platform.CreateSecretVersionParams{ + GroupID: matching.ID, + PlaintextValue: plaintextValue, + Etag: matching.Etag, + }) + if err != nil { + cmdutil.Fatalf("unable to update secret: %v", err) + } + fmt.Printf("Successfully updated secret value for %s.\n", key) + return + } + + // Otherwise create a new secret group. + err = platform.CreateSecretGroup(ctx, platform.CreateSecretGroupParams{ + AppID: app.ID, + Key: key, + PlaintextValue: plaintextValue, + Selector: sel, + Description: "", // not yet supported from CLI + }) + if err != nil { + if ce, ok := getConflictError(err); ok { + var errMsg strings.Builder + fmt.Fprintln(&errMsg, "the environment selection conflicts with other secret values:") + for _, c := range ce.Conflicts { + fmt.Fprintf(&errMsg, "\t%s %s\n", c.GroupID, strings.Join(c.Conflicts, ", ")) + } + cmdutil.Fatal(errMsg.String()) + } + cmdutil.Fatalf("unable to create secret: %v", err) + } + + daemon := cmdutil.ConnectDaemon(ctx) + if _, err := daemon.SecretsRefresh(ctx, &daemonpb.SecretsRefreshRequest{AppRoot: appRoot}); err != nil { + fmt.Fprintln(os.Stderr, "warning: failed to refresh secret secret, skipping:", err) + } + + fmt.Printf("Successfully created secret value for %s.\n", key) +} + +func (s secretEnvSelector) ParseSelector(ctx context.Context, appSlug string) []gql.SecretSelector { + if s.devFlag && s.prodFlag { + cmdutil.Fatal("cannot specify both --dev and --prod") + } else if 
// ParseSelector turns the --dev/--prod/--type/--env flags into a list of
// secret selectors, validating flag combinations and resolving environment
// names against the app's environments. Exits the process on invalid input.
func (s secretEnvSelector) ParseSelector(ctx context.Context, appSlug string) []gql.SecretSelector {
	// The legacy --dev/--prod flags are mutually exclusive with each other
	// and with the newer --type/--env flags.
	if s.devFlag && s.prodFlag {
		cmdutil.Fatal("cannot specify both --dev and --prod")
	} else if s.devFlag && (len(s.envTypes) > 0 || len(s.envNames) > 0) {
		cmdutil.Fatal("cannot combine --dev with --type/--env")
	} else if s.prodFlag && (len(s.envTypes) > 0 || len(s.envNames) > 0) {
		cmdutil.Fatal("cannot combine --prod with --type/--env")
	}

	// Look up the environments
	// NOTE(review): the map is keyed by env.Slug while the flag help talks
	// about environment "name(s)" — confirm slug is what users pass.
	envMap := make(map[string]string) // name -> id
	envs, err := platform.ListEnvs(ctx, appSlug)
	if err != nil {
		cmdutil.Fatalf("unable to list environments: %v", err)
	}
	for _, env := range envs {
		envMap[env.Slug] = env.ID
	}

	var sel []gql.SecretSelector
	if s.devFlag {
		// Legacy --dev covers all non-production environment types.
		sel = append(sel,
			&gql.SecretSelectorEnvType{Kind: "development"},
			&gql.SecretSelectorEnvType{Kind: "preview"},
			&gql.SecretSelectorEnvType{Kind: "local"},
		)
	} else if s.prodFlag {
		sel = append(sel, &gql.SecretSelectorEnvType{Kind: "production"})
	} else {
		// Parse env types and env names
		seenTypes := make(map[string]bool)
		validTypes := map[string]string{
			// Actual names
			"development": "development",
			"production":  "production",
			"preview":     "preview",
			"local":       "local",

			// Aliases
			"dev":       "development",
			"prod":      "production",
			"pr":        "preview",
			"ephemeral": "preview",
		}

		// Env types are deduplicated after alias resolution.
		for _, t := range s.envTypes {
			val, ok := validTypes[t]
			if !ok {
				cmdutil.Fatalf("invalid environment type %q", t)
			}
			if !seenTypes[val] {
				seenTypes[val] = true
				sel = append(sel, &gql.SecretSelectorEnvType{Kind: val})
			}
		}
		// NOTE(review): unlike env types, duplicate --env names are not
		// deduplicated and will produce duplicate selectors — confirm the
		// platform tolerates that.
		for _, n := range s.envNames {
			envID, ok := envMap[n]
			if !ok {
				cmdutil.Fatalf("environment %q not found", n)
			}
			sel = append(sel, &gql.SecretSelectorSpecificEnv{Env: &gql.Env{ID: envID}})
		}
	}

	if len(sel) == 0 {
		cmdutil.Fatal("must specify at least one environment with --type/--env (or --dev/--prod)")
	}
	return sel
}
+func readSecretValue() string { + var value string + fd := syscall.Stdin + if terminal.IsTerminal(int(fd)) { + fmt.Fprint(os.Stderr, "Enter secret value: ") + data, err := terminal.ReadPassword(int(fd)) + if err != nil { + cmdutil.Fatal(err) + } + value = string(data) + fmt.Fprintln(os.Stderr) + } else { + data, err := io.ReadAll(os.Stdin) + if err != nil { + cmdutil.Fatal(err) + } + value = string(bytes.TrimRight(data, "\r\n")) + } + return value +} + +// findMatchingSecretGroup find whether a matching secret group already exists +// for the given secret key and selector. +func findMatchingSecretGroup(secrets []*gql.Secret, key string, selector []gql.SecretSelector) *gql.SecretGroup { + // canonicalize returns the secret selectors in canonical form + canonicalize := func(sels []gql.SecretSelector) []string { + var strs []string + for _, s := range sels { + strs = append(strs, s.String()) + } + sort.Strings(strs) + return strs + } + + want := canonicalize(selector) + for _, s := range secrets { + if s.Key == key { + for _, g := range s.Groups { + got := canonicalize(g.Selector) + if slices.Equal(got, want) { + return g + } + } + } + } + return nil +} + +func getConflictError(err error) (*gql.ConflictError, bool) { + var gqlErr gql.ErrorList + if !errors.As(err, &gqlErr) { + return nil, false + } + for _, e := range gqlErr { + if conflict := e.Extensions["conflict"]; len(conflict) > 0 { + var cerr gql.ConflictError + if err := json.Unmarshal(conflict, &cerr); err == nil { + return &cerr, true + } + } + } + return nil, false +} diff --git a/cli/cmd/encore/sqlc.go b/cli/cmd/encore/sqlc.go new file mode 100644 index 0000000000..86de21206e --- /dev/null +++ b/cli/cmd/encore/sqlc.go @@ -0,0 +1,178 @@ +package main + +import ( + "bufio" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/golang/protobuf/proto" + "github.com/spf13/cobra" + "github.com/sqlc-dev/sqlc/pkg/cli" + "google.golang.org/protobuf/encoding/protojson" + + 
"encr.dev/proto/encore/daemon" +) + +type sqlcSQL struct { + Schema string `json:"schema"` + Queries string `json:"queries"` + Engine string `json:"engine"` + Codegen []sqlcCodegen `json:"codegen"` +} + +type sqlcCodegen struct { + Out string `json:"out"` + Plugin string `json:"plugin"` +} + +type sqlcPlugin struct { + Name string `json:"name"` + Process sqlcProcess `json:"process"` +} + +type sqlcProcess struct { + Cmd string `json:"cmd"` +} + +type sqlcConfig struct { + Version string `json:"version"` + SQL []sqlcSQL `json:"sql"` + Plugins []sqlcPlugin `json:"plugins"` +} + +func init() { + var useProto bool + genCmd := &cobra.Command{ + Use: "generate-sql-schema ", + Short: "Plugin for SQLC: stores the parsed sqlc model in a protobuf file", + Hidden: true, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + schemaPath, err := filepath.Abs(args[0]) + if err != nil { + return err + } + tmpDir, err := os.MkdirTemp("", "encore-sqlc") + if err != nil { + return err + } + defer func() { + _ = os.RemoveAll(tmpDir) + }() + + sqlcPath := filepath.Join(tmpDir, "sqlc.json") + queryPath := filepath.Join(tmpDir, "query.sql") + outPath := filepath.Join(tmpDir, "gen") + // SQLC requires the schema path to be relative to the sqlc.json file + schemaPath, err = filepath.Rel(tmpDir, schemaPath) + if err != nil { + return err + } + cfg := sqlcConfig{ + Version: "2", + SQL: []sqlcSQL{ + { + Schema: schemaPath, + Queries: "query.sql", + Engine: "postgresql", + Codegen: []sqlcCodegen{ + { + Out: "gen", + Plugin: "encore", + }, + }, + }, + }, + Plugins: []sqlcPlugin{ + { + Name: "encore", + Process: sqlcProcess{ + Cmd: os.Args[0], + }, + }, + }, + } + cfgData, err := json.Marshal(cfg) + if err != nil { + return err + } + err = os.WriteFile(sqlcPath, cfgData, 0644) + if err != nil { + return err + } + + // SQLC requires at least one query to be present in the query file + err = os.WriteFile(queryPath, []byte("-- name: Dummy :one\nSELECT 'dummy';"), 
0644) + if err != nil { + return err + } + + res := cli.Run([]string{"generate", "-f", sqlcPath}) + if res != 0 { + return fmt.Errorf("sqlc exited with code %d", res) + } + reqBlob, err := os.ReadFile(filepath.Join(outPath, "output.pb")) + if !useProto { + req := &daemon.SQLCPlugin_GenerateRequest{} + if err := proto.Unmarshal(reqBlob, req); err != nil { + return err + } + reqBlob, err = protojson.MarshalOptions{ + EmitUnpopulated: true, + Indent: " ", + UseProtoNames: true, + }.Marshal(req) + if err != nil { + return err + } + } + + w := bufio.NewWriter(os.Stdout) + if _, err := w.Write(reqBlob); err != nil { + return err + } + if err := w.Flush(); err != nil { + return err + } + return nil + }, + } + genCmd.Flags().BoolVar(&useProto, "proto", false, "Output the parsed schema as protobuf") + pluginCmd := &cobra.Command{ + Use: "/plugin.CodegenService/Generate", + Short: "Plugin for SQLC: stores the parsed sqlc model in a protobuf file", + Hidden: true, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + reqBlob, err := io.ReadAll(os.Stdin) + if err != nil { + return err + } + resp := &daemon.SQLCPlugin_GenerateResponse{ + Files: []*daemon.SQLCPlugin_File{ + { + Name: "output.pb", + Contents: reqBlob, + }, + }, + } + respBlob, err := proto.Marshal(resp) + if err != nil { + return err + } + w := bufio.NewWriter(os.Stdout) + if _, err := w.Write(respBlob); err != nil { + return err + } + if err := w.Flush(); err != nil { + return err + } + return nil + }, + } + rootCmd.AddCommand(genCmd) + rootCmd.AddCommand(pluginCmd) +} diff --git a/cli/cmd/encore/telemetry.go b/cli/cmd/encore/telemetry.go new file mode 100644 index 0000000000..f5ae08146a --- /dev/null +++ b/cli/cmd/encore/telemetry.go @@ -0,0 +1,129 @@ +package main + +import ( + "context" + "fmt" + "os" + "slices" + "strings" + + "github.com/logrusorgru/aurora/v3" + "github.com/rs/zerolog/log" + "github.com/spf13/cobra" + + "encr.dev/cli/cmd/encore/cmdutil" + 
"encr.dev/cli/cmd/encore/root" + "encr.dev/cli/internal/telemetry" + "encr.dev/pkg/fns" + daemonpb "encr.dev/proto/encore/daemon" +) + +var TelemetryDisabledByEnvVar = os.Getenv("DISABLE_ENCORE_TELEMETRY") == "1" +var TelemetryDebugByEnvVar = os.Getenv("ENCORE_TELEMETRY_DEBUG") == "1" + +func printTelemetryStatus() { + status := aurora.Green("Enabled").String() + if !telemetry.IsEnabled() { + status = aurora.Red("Disabled").String() + } + fmt.Println(aurora.Sprintf("%s\n", aurora.Bold("Encore Telemetry"))) + items := [][2]string{ + {"Status", status}, + } + if root.Verbosity > 0 { + items = append(items, [2]string{"Install ID", telemetry.GetAnonID()}) + } + if telemetry.IsDebug() { + items = append(items, [2]string{"Debug", aurora.Green("Enabled").String()}) + } + maxKeyLen := fns.Max(items, func(entry [2]string) int { return len(entry[0]) }) + for _, item := range items { + spacing := strings.Repeat(" ", maxKeyLen-len(item[0])) + fmt.Printf("%s: %s%s\n", item[0], spacing, item[1]) + } + fmt.Println(aurora.Sprintf("\nLearn more: %s", aurora.Underline("https://encore.dev/docs/telemetry"))) +} + +func updateTelemetry(ctx context.Context) { + // Update the telemetry config on the daemon if it is running + if cmdutil.IsDaemonRunning(ctx) { + daemon := cmdutil.ConnectDaemon(ctx) + _, err := daemon.Telemetry(ctx, &daemonpb.TelemetryConfig{ + AnonId: telemetry.GetAnonID(), + Enabled: telemetry.IsEnabled(), + Debug: telemetry.IsDebug(), + }) + if err != nil { + log.Debug().Err(err).Msgf("could not update daemon telemetry: %s", err) + } + } + if err := telemetry.SaveConfig(); err != nil { + log.Debug().Err(err).Msgf("could not save telemetry: %s", err) + } +} + +var telemetryCommand = &cobra.Command{ + Use: "telemetry", + Short: "Reports the current telemetry status", + + Run: func(cmd *cobra.Command, args []string) { + printTelemetryStatus() + }, +} + +var telemetryEnableCommand = &cobra.Command{ + Use: "enable", + Short: "Enables telemetry reporting", + Run: func(cmd 
*cobra.Command, args []string) { + if telemetry.SetEnabled(true) { + updateTelemetry(cmd.Context()) + } + printTelemetryStatus() + }, +} + +var telemetryDisableCommand = &cobra.Command{ + Use: "disable", + Short: "Disables telemetry reporting", + Run: func(cmd *cobra.Command, args []string) { + if telemetry.SetEnabled(false) { + updateTelemetry(cmd.Context()) + } + printTelemetryStatus() + }, +} + +func isCommand(cmd *cobra.Command, name ...string) bool { + for cmd != nil { + if slices.Contains(name, cmd.Name()) { + return true + } + cmd = cmd.Parent() + } + return false +} + +func init() { + telemetryCommand.AddCommand(telemetryEnableCommand, telemetryDisableCommand) + rootCmd.AddCommand(telemetryCommand) + root.AddPreRun(func(cmd *cobra.Command, args []string) { + update := false + if TelemetryDisabledByEnvVar { + update = telemetry.SetEnabled(false) + } + if cmd.Use == "daemon" { + return + } + update = update || telemetry.SetDebug(TelemetryDebugByEnvVar) + if update { + go updateTelemetry(cmd.Context()) + } + if telemetry.ShouldShowWarning() && !isCommand(cmd, "version", "completion") { + fmt.Println() + fmt.Println(aurora.Sprintf("%s: This CLI tool collects usage data to help us improve Encore.", aurora.Bold("Note"))) + fmt.Println(aurora.Sprintf(" You can disable this by running '%s'.\n", aurora.Yellow("encore telemetry disable"))) + telemetry.SetShownWarning() + } + }) + +} diff --git a/cli/cmd/encore/test.go b/cli/cmd/encore/test.go index f05362fcf3..01a8ea1fb7 100644 --- a/cli/cmd/encore/test.go +++ b/cli/cmd/encore/test.go @@ -2,31 +2,82 @@ package main import ( "context" + "encoding/json" + "errors" + "fmt" "os" + "os/exec" "os/signal" + "path/filepath" + "slices" + "strings" + "time" - daemonpb "encr.dev/proto/encore/daemon" "github.com/spf13/cobra" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "encr.dev/cli/cmd/encore/cmdutil" + daemonpb "encr.dev/proto/encore/daemon" ) var testCmd = &cobra.Command{ - Use: "test", + Use: "test 
[go test flags]", Short: "Tests your application", + Long: "Takes all the same flags as `go test`.", + + DisableFlagsInUseLine: true, Run: func(cmd *cobra.Command, args []string) { - // Support --help but otherwise let all args be passed on to "go test" - for _, arg := range args { + var ( + traceFile string + codegenDebug bool + prepareOnly bool + noColor bool + ) + // Support specific args but otherwise let all args be passed on to "go test" + for i := 0; i < len(args); i++ { + arg := args[i] if arg == "-h" || arg == "--help" { - cmd.Help() + _ = cmd.Help() return + } else if arg == "--trace" || strings.HasPrefix(arg, "--trace=") { + // Drop this argument always. + args = slices.Delete(args, i, i+1) + i-- + + // We either have '--trace=file' or '--trace file'. + // Handle both. + if _, value, ok := strings.Cut(arg, "="); ok { + traceFile = value + } else { + // Make sure there is a next argument. + if i < len(args) { + traceFile = args[i] + args = slices.Delete(args, i, i+1) + i-- + } + } + } else if arg == "--codegen-debug" { + codegenDebug = true + args = slices.Delete(args, i, i+1) + i-- + } else if arg == "--prepare" { + prepareOnly = true + args = slices.Delete(args, i, i+1) + i-- + } else if arg == "--no-color" { + noColor = true + args = slices.Delete(args, i, i+1) + i-- } } appRoot, relPath := determineAppRoot() - runTests(appRoot, relPath, args) + runTests(appRoot, relPath, args, traceFile, codegenDebug, prepareOnly, noColor) }, } -func runTests(appRoot, testDir string, args []string) { +func runTests(appRoot, testDir string, args []string, traceFile string, codegenDebug, prepareOnly, noColor bool) { interrupt := make(chan os.Signal, 1) signal.Notify(interrupt, os.Interrupt) @@ -36,19 +87,121 @@ func runTests(appRoot, testDir string, args []string) { cancel() }() + converter := cmdutil.ConvertJSONLogs(cmdutil.Colorize(!noColor)) + if slices.Contains(args, "-json") { + converter = convertTestEventOutputOnly(converter) + } + daemon := setupDaemon(ctx) + + 
// Is this a node package? + packageJsonPath := filepath.Join(appRoot, "package.json") + if _, err := os.Stat(packageJsonPath); err == nil || prepareOnly { + spec, err := daemon.TestSpec(ctx, &daemonpb.TestSpecRequest{ + AppRoot: appRoot, + WorkingDir: testDir, + Args: args, + Environ: os.Environ(), + }) + if status.Code(err) == codes.NotFound { + fatal("application does not define any tests.\nNote: Add a 'test' script command to package.json to run tests.") + } else if err != nil { + fatal(err) + } + + if prepareOnly { + for _, ln := range spec.Environ { + fmt.Println(ln) + } + return + } + + cmd := exec.Command(spec.Command, spec.Args...) + cmd.Env = spec.Environ + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Stdin = os.Stdin + + if err := cmd.Run(); err != nil { + var exitErr *exec.ExitError + if errors.As(err, &exitErr) { + os.Exit(exitErr.ExitCode()) + } else { + fatal(err) + } + } + return + } + stream, err := daemon.Test(ctx, &daemonpb.TestRequest{ - AppRoot: appRoot, - WorkingDir: testDir, - Args: args, + AppRoot: appRoot, + WorkingDir: testDir, + Args: args, + Environ: os.Environ(), + TraceFile: nonZeroPtr(traceFile), + CodegenDebug: codegenDebug, }) if err != nil { fatal(err) } - streamCommandOutput(stream) + os.Exit(cmdutil.StreamCommandOutput(stream, converter)) } func init() { testCmd.DisableFlagParsing = true rootCmd.AddCommand(testCmd) + + // Even though we've disabled flag parsing, we still need to define the flags + // so that the help text is correct. 
+ testCmd.Flags().Bool("codegen-debug", false, "Dump generated code (for debugging Encore's code generation)") + testCmd.Flags().Bool("prepare", false, "Prepare for running tests (without running them)") + testCmd.Flags().String("trace", "", "Specifies a trace file to write trace information about the parse and compilation process to.") + testCmd.Flags().Bool("no-color", false, "Disable colorized output") + +} + +func convertTestEventOutputOnly(converter cmdutil.OutputConverter) cmdutil.OutputConverter { + return func(line []byte) []byte { + // If this isn't a JSON log line, just return it as-is + if len(line) == 0 || line[0] != '{' { + return line + } + + testEvent := &testJSONEvent{} + if err := json.Unmarshal(line, testEvent); err == nil && testEvent.Action == "output" { + if testEvent.Output != nil && (*(testEvent.Output))[0] == '{' { + convertedLogs := textBytes(converter(*testEvent.Output)) + testEvent.Output = &convertedLogs + + newLine, err := json.Marshal(testEvent) + if err == nil { + return append(newLine, '\n') + } + } + } + + return line + } +} + +// testJSONEvent and textBytes taken from the Go source code +type testJSONEvent struct { + Time *time.Time `json:",omitempty"` + Action string + Package string `json:",omitempty"` + Test string `json:",omitempty"` + Elapsed *float64 `json:",omitempty"` + Output *textBytes `json:",omitempty"` +} + +// textBytes is a hack to get JSON to emit a []byte as a string +// without actually copying it to a string. +// It implements encoding.TextMarshaler, which returns its text form as a []byte, +// and then json encodes that text form as a string (which was our goal). 
+type textBytes []byte + +func (b *textBytes) MarshalText() ([]byte, error) { return *b, nil } +func (b *textBytes) UnmarshalText(in []byte) error { + *b = in + return nil } diff --git a/cli/cmd/encore/version.go b/cli/cmd/encore/version.go index f7d7274382..d7cbf1d412 100644 --- a/cli/cmd/encore/version.go +++ b/cli/cmd/encore/version.go @@ -1,24 +1,95 @@ package main import ( + "context" "fmt" "os" + "strings" + "time" + "github.com/logrusorgru/aurora/v3" "github.com/spf13/cobra" -) -// Version is the version of the encore binary. -// It is set using `go build -ldflags "-X main.Version=v1.2.3"`. -var Version string + "encr.dev/cli/internal/update" + "encr.dev/internal/version" +) var versionCmd = &cobra.Command{ Use: "version", Short: "Reports the current version of the encore application", + + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + var ( + ver *update.LatestVersion + err error + ) + if version.Version != "" { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + ver, err = update.Check(ctx) + } + + // NOTE: This output format is relied on by the Encore IntelliJ plugin. + // Don't change this without considering its impact on that plugin. 
+ fmt.Fprintln(os.Stdout, "encore version", version.Version) + + if err != nil { + fatalf("could not check for update: %v", err) + } else if ver.IsNewer(version.Version) { + if ver.ForceUpgrade { + fmt.Println(aurora.Red("An urgent security update for Encore is available.")) + if ver.SecurityNotes != "" { + fmt.Println(aurora.Sprintf(aurora.Yellow("%s"), ver.SecurityNotes)) + } + + versionUpdateCmd.Run(cmd, args) + } else { + if ver.SecurityUpdate { + fmt.Println(aurora.Sprintf(aurora.Red("A security update is update available: %s -> %s\nUpdate with: encore version update"), version.Version, ver.Version())) + + if ver.SecurityNotes != "" { + fmt.Println(aurora.Sprintf(aurora.Yellow("%s"), ver.SecurityNotes)) + } + } else { + fmt.Println(aurora.Sprintf(aurora.Yellow("Update available: %s -> %s\nUpdate with: encore version update"), version.Version, ver.Version())) + } + } + } + }, +} + +var versionUpdateCmd = &cobra.Command{ + Use: "update", + Short: "Checks for an update of encore and, if one is available, runs the appropriate command to update it.", + + DisableFlagsInUseLine: true, Run: func(cmd *cobra.Command, args []string) { - fmt.Fprintln(os.Stdout, "encore version", Version) + if version.Version == "" || strings.HasPrefix(version.Version, "devel") { + fatal("cannot update development build, first install Encore from https://encore.dev/docs/install") + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + ver, err := update.Check(ctx) + if err != nil { + fatalf("could not check for update: %v", err) + } + if ver.IsNewer(version.Version) { + fmt.Printf("Upgrading Encore to %v...\n", ver.Version()) + + if err := ver.DoUpgrade(os.Stdout, os.Stderr); err != nil { + fatalf("could not update: %v", err) + os.Exit(1) + } + os.Exit(0) + } else { + fmt.Println("Encore already up to date.") + } }, } func init() { + versionCmd.AddCommand(versionUpdateCmd) rootCmd.AddCommand(versionCmd) } diff --git a/cli/cmd/encore/vpn.go 
b/cli/cmd/encore/vpn.go deleted file mode 100644 index 80060d9747..0000000000 --- a/cli/cmd/encore/vpn.go +++ /dev/null @@ -1,111 +0,0 @@ -package main - -import ( - "context" - "errors" - "fmt" - "log" - "os" - "time" - - "encr.dev/cli/internal/conf" - "encr.dev/cli/internal/wgtunnel" - "encr.dev/cli/internal/xos" - "github.com/spf13/cobra" - "golang.zx2c4.com/wireguard/wgctrl/wgtypes" -) - -var vpnCmd = &cobra.Command{ - Use: "vpn", - Short: "VPN management commands", -} - -func init() { - rootCmd.AddCommand(vpnCmd) - - startCmd := &cobra.Command{ - Use: "start", - Short: "Sets up a secure connection to private environments", - Run: func(cmd *cobra.Command, args []string) { - if admin, err := xos.IsAdminUser(); err == nil && !admin { - log.Fatalf("fatal: must start VPN as root user (use 'sudo'?)") - } - - cfg, err := conf.OriginalUser("") - if errors.Is(err, os.ErrNotExist) { - log.Fatalf("fatal: not logged in. run 'encore auth login' first") - } else if err != nil { - log.Fatalf("fatal: could not read encore config (did you run 'encore auth login'?): %v", err) - } else if cfg.WireGuard.PrivateKey == "" || cfg.WireGuard.PublicKey == "" { - log.Println("encore: generating WireGuard key...") - pub, priv, err := wgtunnel.GenKey() - if err != nil { - log.Fatalf("fatal: could not generate WireGuard key: %v", err) - } - cfg.WireGuard.PublicKey = pub.String() - cfg.WireGuard.PrivateKey = priv.String() - if err := conf.Write(cfg); err != nil { - log.Fatalf("fatal: could not write updated config: %v", err) - } - log.Println("encore: successfully generated and persisted WireGuard key") - } - - pubKey, err1 := wgtypes.ParseKey(cfg.WireGuard.PublicKey) - privKey, err2 := wgtypes.ParseKey(cfg.WireGuard.PrivateKey) - if err1 != nil || err2 != nil { - fatalf("could not parse public/private key: %v/%v", err1, err2) - } - - log.Printf("encore: registering device with server...") - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - ip, err 
:= wgtunnel.RegisterDevice(ctx, pubKey) - if err != nil { - log.Fatalf("fatal: could not register device: %v", err) - } - log.Printf("encore: successfully registered device, assigned address %s", ip) - - log.Printf("encore: starting WireGuard tunnel...") - cc := &wgtunnel.ClientConfig{ - Addr: ip, - PrivKey: privKey, - } - if err := wgtunnel.Start(cc, nil); err != nil { - log.Fatalf("fatal: could not start tunnel: %v", err) - } - log.Printf("encore: successfully started WireGuard tunnel") - }, - } - vpnCmd.AddCommand(startCmd) - - stopCmd := &cobra.Command{ - Use: "stop", - Short: "Stops the VPN connection", - Run: func(cmd *cobra.Command, args []string) { - if err := wgtunnel.Stop(); os.IsPermission(err) { - log.Fatal("fatal: permission denied to stop tunnel (use 'sudo'?)") - } else if err != nil { - log.Fatalf("fatal: could not stop tunnel: %v", err) - } - log.Printf("encore: stopped WireGuard tunnel") - }, - } - vpnCmd.AddCommand(stopCmd) - - statusCmd := &cobra.Command{ - Use: "status", - Short: "Determines the status of the VPN connection", - Run: func(cmd *cobra.Command, args []string) { - if running, err := wgtunnel.Status(); os.IsPermission(err) { - log.Fatal("fatal: permission denied to check tunnel status (use 'sudo'?)") - } else if err != nil { - log.Fatalf("fatal: could not check tunnel status: %v", err) - } else if running { - fmt.Fprintln(os.Stdout, "running") - } else { - fmt.Fprintln(os.Stdout, "not running") - } - }, - } - vpnCmd.AddCommand(statusCmd) -} diff --git a/cli/cmd/encore/vpn_darwin.go b/cli/cmd/encore/vpn_darwin.go deleted file mode 100644 index 0c85936d6e..0000000000 --- a/cli/cmd/encore/vpn_darwin.go +++ /dev/null @@ -1,27 +0,0 @@ -package main - -import ( - "log" - - "encr.dev/cli/internal/wgtunnel" - "encr.dev/cli/internal/xos" - "github.com/spf13/cobra" -) - -func init() { - runCmd := &cobra.Command{ - Use: "__run", - Short: "Runs the WireGuard tunnel synchronously.", - Hidden: true, - Run: func(cmd *cobra.Command, args []string) { 
- if admin, err := xos.IsAdminUser(); err == nil && !admin { - log.Fatalf("fatal: must start VPN as root user (use 'sudo'?)") - } - if err := wgtunnel.Run(); err != nil { - fatal(err) - } - }, - } - - vpnCmd.AddCommand(runCmd) -} diff --git a/cli/cmd/encore/vpn_windows.go b/cli/cmd/encore/vpn_windows.go deleted file mode 100644 index 7ba2d477e3..0000000000 --- a/cli/cmd/encore/vpn_windows.go +++ /dev/null @@ -1,69 +0,0 @@ -package main - -import ( - "fmt" - "os" - - "encr.dev/cli/internal/winsvc" - "github.com/spf13/cobra" -) - -func init() { - installCmd := &cobra.Command{ - Hidden: true, - Use: "svc-install", - Short: "Installs the windows service for the WireGuard tunnel", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - if err := winsvc.Install(args[0]); err != nil { - fatal(err) - } - }, - } - vpnCmd.AddCommand(installCmd) - - uninstallCmd := &cobra.Command{ - Hidden: true, - Use: "svc-uninstall", - Short: "Uninstalls the windows service", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - if err := winsvc.Uninstall(args[0]); err != nil { - fatal(err) - } - }, - } - vpnCmd.AddCommand(uninstallCmd) - - statusCmd := &cobra.Command{ - Hidden: true, - Use: "svc-status", - Short: "Uninstalls the windows service", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - installed, err := winsvc.Status(args[0]) - if err != nil { - fatal(err) - } - if installed { - fmt.Fprintln(os.Stdout, "installed") - } else { - fmt.Fprintln(os.Stdout, "not installed") - } - }, - } - vpnCmd.AddCommand(statusCmd) - - runCmd := &cobra.Command{ - Hidden: true, - Use: "svc-run", - Short: "Runs the windows service", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - if err := winsvc.Run(args[0]); err != nil { - fatal(err) - } - }, - } - vpnCmd.AddCommand(runCmd) -} diff --git a/cli/cmd/git-remote-encore/main.go b/cli/cmd/git-remote-encore/main.go index b26a36dae1..c22bdcc0d9 100644 
--- a/cli/cmd/git-remote-encore/main.go +++ b/cli/cmd/git-remote-encore/main.go @@ -9,9 +9,10 @@ import ( "net/url" "os" "os/exec" + "path/filepath" "strings" - "encr.dev/cli/internal/conf" + "encr.dev/internal/conf" ) func main() { @@ -21,6 +22,20 @@ func main() { } } +var isLocalTest = (func() bool { + return filepath.Base(os.Args[0]) == "git-remote-encorelocal" +})() + +// remoteScheme is the remote scheme we expect. +// It's "encore" in general but "encorelocal" for local development. +var remoteScheme = (func() string { + if isLocalTest { + return "encorelocal" + } else { + return "encore" + } +})() + func run(args []string) error { stdin := bufio.NewReader(os.Stdin) stdout := os.Stdout @@ -52,12 +67,12 @@ func connect(args []string, svc string) error { uri, err := url.Parse(args[2]) if err != nil { return fmt.Errorf("connect %s: invalid remote uri: %v", os.Args[2], err) - } else if uri.Scheme != "encore" { - return fmt.Errorf("connect %s: expected remote scheme \"encore\", got %q", os.Args[2], uri.Scheme) + } else if uri.Scheme != remoteScheme { + return fmt.Errorf("connect %s: expected remote scheme %q, got %q", os.Args[2], remoteScheme, uri.Scheme) } appID := uri.Hostname() - ts := &conf.TokenSource{} + ts := conf.NewTokenSource() tok, err := ts.Token() if err != nil { return fmt.Errorf("could not get Encore auth token: %v", err) @@ -68,25 +83,46 @@ func connect(args []string, svc string) error { return err } keyPath := f.Name() - defer os.Remove(keyPath) + defer func() { _ = os.Remove(keyPath) }() + if err := f.Chmod(0600); err != nil { - f.Close() + _ = f.Close() return err } else if _, err := f.Write([]byte(SentinelPrivateKey)); err != nil { - f.Close() + _ = f.Close() return err } else if err := f.Close(); err != nil { return err } + // Create a dummy config file so that we can work around any host overrides + // present on the system. 
+ cfg, err := os.CreateTemp("", "encore-dummy-ssh-config") + if err != nil { + return err + } + cfgPath := cfg.Name() + defer func() { _ = os.Remove(cfgPath) }() + // Communicate to Git that the connection is established. - os.Stdout.Write([]byte("\n")) + _, _ = os.Stdout.Write([]byte("\n")) + + sshServer, port := "git.encore.dev", "22" + if isLocalTest { + sshServer, port = "localhost", "9040" + } // Set up an SSH tunnel with a sentinel key as a way to signal // Encore to use token-based authentication, and pass the token // as part of the command. - cmd := exec.Command("ssh", "-x", "-T", "-o", "IdentitiesOnly=yes", "-i", keyPath, - "git.encore.dev", fmt.Sprintf("token=%s %s '%s'", tok.AccessToken, svc, appID)) + cmd := exec.Command("ssh", + "-x", "-T", + "-F", cfgPath, + "-o", "IdentitiesOnly=yes", + "-i", keyPath, + "-p", port, + sshServer, + fmt.Sprintf("token=%s %s '%s'", tok.AccessToken, svc, appID)) cmd.Env = []string{} cmd.Stdin = os.Stdin cmd.Stdout = os.Stdout @@ -100,20 +136,12 @@ func connect(args []string, svc string) error { // // NOTE: This is not a security problem. The key is meant to be public // and does not serve as a means of authentication. 
+// nosemgrep const SentinelPrivateKey = `-----BEGIN OPENSSH PRIVATE KEY----- -b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAlwAAAAdzc2gtcn -NhAAAAAwEAAQAAAIEA1ZrV6bnLgKI7cZHGn3Z93jTATaGjw6ytPdSorrnwYRP3K833BC19 -ANPSWAoXcYXNDIR90j/V+sd5ILv5NUoctdV1+2J8jzW/hedj0HuDou1YruNHVowfE3JFYr -6eMK15kvc/K9EsIl/TfH9/RiWVnWq1wHwOdZtH2UZE9QdT+r0AAAIIrcJlP63CZT8AAAAH -c3NoLXJzYQAAAIEA1ZrV6bnLgKI7cZHGn3Z93jTATaGjw6ytPdSorrnwYRP3K833BC19AN -PSWAoXcYXNDIR90j/V+sd5ILv5NUoctdV1+2J8jzW/hedj0HuDou1YruNHVowfE3JFYr6e -MK15kvc/K9EsIl/TfH9/RiWVnWq1wHwOdZtH2UZE9QdT+r0AAAADAQABAAAAgBndpgmndf -0dqBUYkfS9ZICD4sWDzVDkmBXkqoh9+53FzSiAyGi5GWoAPHhswGn+ydW6NYJAOKklfoV4 -PbU2REOHwXYblAZmDmPksSN1IbjDdFZ+0vXFUmS2k30eqIgIEGOrN1tnLXoK+B4kwFQ1IN -UMMpB39vRyhyrEGv+S4gQBAAAAQFiOrnRAtY50ZiqXND3SdCnQxnjmUxcE7pcQaaQK6KMP -A7bQpMNzJop/UpNRIjLb5bPG9FPgTzQ5+5l4fGL5OwYAAABBAP4V8q7KQLqoPsHaWG7pga -iE9cUzE9hle2zXiRCcXt2qXxB7P1U9DQVdzVwarfAggIGRsqjJmEDe69F/I4QAkj8AAABB -ANc20AXzRmnneRyZuOEUhTsdNWcQf9qv+tQh3DDr7SW7NhuSKW9CqC18nbDckEp0yOCjIR -k5HAPXd2pDop0UvAMAAAAPZWFuZHJlQG0xLmxvY2FsAQIDBA== +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW +QyNTUxOQAAACCyj3F5Tp1eBIp7rMohszumYzlys/BFfmX/LVkXJS8magAAAJjsp3yz7Kd8 +swAAAAtzc2gtZWQyNTUxOQAAACCyj3F5Tp1eBIp7rMohszumYzlys/BFfmX/LVkXJS8mag +AAAEDMiwRrf5WET2mTKjKjX7z6vox3n6hKGKbP7V4MDtVre7KPcXlOnV4EinusyiGzO6Zj +OXKz8EV+Zf8tWRclLyZqAAAAE2VuY29yZS1zZW50aW5lbC1rZXkBAg== -----END OPENSSH PRIVATE KEY----- ` diff --git a/cli/cmd/tsbundler-encore/main.go b/cli/cmd/tsbundler-encore/main.go new file mode 100644 index 0000000000..90aa1e4fa3 --- /dev/null +++ b/cli/cmd/tsbundler-encore/main.go @@ -0,0 +1,297 @@ +package main + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/evanw/esbuild/pkg/api" + + "encr.dev/internal/version" + + flag "github.com/spf13/pflag" +) + +var ( + entryPoints []string + specifiedEngines []string + // replacementFile string + outDir string + bundle bool + minify bool + help bool + 
logLevel int +) + +// main is the entry point for the tsbundler-encore command. +// +// It is responsible for parsing the command line flags, validating the input, and then triggering esbuild. +// +// Run with --help for more information. +func main() { + // Required flags + // flag.StringVar(&replacementFile, "replacements", "", "Replacement file or json object (default read from stdin)") + + // Optional flags + flag.StringVar(&outDir, "outdir", "dist", "Output directory") + flag.BoolVar(&bundle, "bundle", true, "Bundle all dependencies") + flag.BoolVar(&minify, "minify", false, "Minify output (default false)") + flag.StringArrayVar(&specifiedEngines, "engine", []string{"node:21"}, "Target engine") + flag.CountVarP(&logLevel, "verbose", "v", "Increase logging level (can be specified multiple times)") + flag.BoolVarP(&help, "help", "h", false, "Print help") + flag.Usage = printHelp + flag.Parse() + + entryPoints = flag.Args() + if help { + printHelp() + os.Exit(0) + } + + // Validate input (note: these functions will exit on error) + validateEntrypointParams() + engines := readEngines() + // replacements := readReplacementMapping() + + // Create our transformer plugin + // rewritePlugin := api.Plugin{ + // Name: "encore-codegen-transformer", + // Setup: func(build api.PluginBuild) { + // build.OnLoad( + // api.OnLoadOptions{Filter: `\.(ts|js)(x?)$`}, + // func(args api.OnLoadArgs) (api.OnLoadResult, error) { + // replacement, found := replacements[args.Path] + // if !found { + // return api.OnLoadResult{}, nil + // } + + // contentsBytes, err := os.ReadFile(replacement) + // if err != nil { + // return api.OnLoadResult{}, fmt.Errorf("error reading replacement file: %w", err) + // } + // content := string(contentsBytes) + + // return api.OnLoadResult{ + // PluginName: "encore-codegen-transformer", + // Contents: &content, + // Loader: api.LoaderTS, + // }, nil + // }, + // ) + // }, + // } + + banner := `// This file was bundled by Encore ` + version.Version + ` +// 
+// https://encore.dev` + + outBase := "" + if len(entryPoints) == 1 { + // If there's a single entrypoint, use its directory as the outbase + // as otherwise esbuild won't include the "[dir]" token in the output. + outBase = filepath.Dir(filepath.Dir(entryPoints[0])) + } + + // Trigger esbuild + result := api.Build(api.BuildOptions{ + // Setup base settings + LogLevel: api.LogLevelWarning - api.LogLevel(logLevel), + Banner: map[string]string{"js": banner}, + Charset: api.CharsetUTF8, + Sourcemap: api.SourceMapLinked, + Packages: api.PackagesExternal, + Plugins: []api.Plugin{ + // rewritePlugin, + }, + TreeShaking: api.TreeShakingTrue, + + // Set our build target + Platform: api.PlatformNode, + Format: api.FormatESModule, + Target: api.ES2022, + Engines: engines, + + // Minification settings + MinifyWhitespace: minify, + MinifySyntax: minify, + MinifyIdentifiers: minify, + + // Pass in what we want to build + EntryNames: "[dir]/[name]", + EntryPoints: entryPoints, + Bundle: bundle, + Outdir: outDir, + Outbase: outBase, + Write: true, // Write to outdir + OutExtension: map[string]string{ + ".js": ".mjs", + }, + Define: map[string]string{ + "ENCORE_DROP_TESTS": "true", + }, + }) + + if len(result.Errors) > 0 { + os.Exit(1) + } +} + +func printHelp() { + binary := filepath.Base(os.Args[0]) + + // Base usage help + versionStr := fmt.Sprintf("tsbundler-encore (%s)", version.Version) + _, _ = fmt.Fprintf(os.Stderr, "%s\n%s\n", versionStr, strings.Repeat("=", len(versionStr))) + _, _ = fmt.Fprintf(os.Stderr, "\nUsage: %s [options]\n", binary) + flag.PrintDefaults() + + // Replacements help + // _, _ = fmt.Fprintf(os.Stderr, "\nReplacements JSON Format:\n") + // _, _ = fmt.Fprintf(os.Stderr, " {\n") + // _, _ = fmt.Fprintf(os.Stderr, " \"/absolute/path/to/file.ts\": \"/path/to/replacement.ts\",\n") + // _, _ = fmt.Fprintf(os.Stderr, " \"/absolute/path/to/file2.ts\": \"/path/to/replacement2.ts\"\n") + // _, _ = fmt.Fprintf(os.Stderr, " }\n") + + // Engine help + _, _ = 
fmt.Fprintf(os.Stderr, "\nEngines:\n\nEngines can be specified as a name, or a name and version separated by a colon,\nfor example \"node:21\" or \"node\". Multiple engines can be specified if required.\n\nThe supported engines are:\n") + _, _ = fmt.Fprintf(os.Stderr, " - node\n") + _, _ = fmt.Fprintf(os.Stderr, " - bun\n") + _, _ = fmt.Fprintf(os.Stderr, " - deno\n") + _, _ = fmt.Fprintf(os.Stderr, " - rhino\n") +} + +// validateEntrypointParams validates that the entry points parameters was specified and that all entry points exist +// and are readable on the file system. +func validateEntrypointParams() { + if len(entryPoints) == 0 { + _, _ = fmt.Fprintf(os.Stderr, "Error: at least one entry point must be specified\n\n") + printHelp() + os.Exit(1) + } + + for _, entryPoint := range entryPoints { + if st, err := os.Stat(entryPoint); errors.Is(err, fs.ErrNotExist) { + _, _ = fmt.Fprintf(os.Stderr, "Error: entry point %s does not exist\n", entryPoint) + os.Exit(1) + } else if err != nil { + _, _ = fmt.Fprintf(os.Stderr, "Error: error reading entry point %s: %s\n", entryPoint, err) + os.Exit(1) + } else if st.IsDir() { + _, _ = fmt.Fprintf(os.Stderr, "Error: entry point %s is a directory\n", entryPoint) + os.Exit(1) + } + } +} + +// readReplacementMapping reads a replacement mapping from either a file or stdin depending +// on if the replacementFile flag was specified. +// +// It then validates that all the keys are valid paths to files and the values are valid paths to files. 
+// func readReplacementMapping() map[string]string { +// out := make(map[string]string) + +// // If a replacement file was specified, read it +// replacementFile = strings.TrimSpace(replacementFile) +// if replacementFile != "" { +// if replacementFile[0] == '{' { +// err := json.Unmarshal([]byte(replacementFile), &out) +// if err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error parsing replacement object: %s\n", err) +// os.Exit(1) +// } +// } else { +// data, err := os.ReadFile(replacementFile) +// if err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error reading replacement file: %s\n", err) +// os.Exit(1) +// } + +// err = json.Unmarshal(data, &out) +// if err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error parsing replacement file: %s\n", err) +// os.Exit(1) +// } +// } +// } else { +// // Check something is being piped in +// info, _ := os.Stdin.Stat() +// if (info.Mode()&os.ModeCharDevice) != 0 || info.Size() <= 0 { +// _, _ = fmt.Fprintf(os.Stderr, "Error: no replacement file specified and nothing piped in\n") +// os.Exit(1) +// } + +// // Otherwise, read from stdin +// if err := json.NewDecoder(os.Stdin).Decode(&out); err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error reading replacement file from stdin: %s\n", err) +// os.Exit(1) +// } +// } + +// // Validate that all the keys are valid paths to files and the values are valid paths to files +// for key, value := range out { +// // Validate key +// if st, err := os.Stat(key); errors.Is(err, fs.ErrNotExist) { +// _, _ = fmt.Fprintf(os.Stderr, "Error: replacement key %s does not exist\n", key) +// os.Exit(1) +// } else if err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error: error reading replacement key %s: %s\n", key, err) +// os.Exit(1) +// } else if st.IsDir() { +// _, _ = fmt.Fprintf(os.Stderr, "Error: replacement key %s is a directory\n", key) +// os.Exit(1) +// } else if !filepath.IsAbs(key) { +// _, _ = fmt.Fprintf(os.Stderr, "Error: replacement key %s is not an absolute path\n", key) +// 
os.Exit(1) +// } + +// // Validate value +// if st, err := os.Stat(value); errors.Is(err, fs.ErrNotExist) { +// _, _ = fmt.Fprintf(os.Stderr, "Error: replacement value %s does not exist\n", value) +// os.Exit(1) +// } else if err != nil { +// _, _ = fmt.Fprintf(os.Stderr, "Error: error reading replacement value %s: %s\n", value, err) +// os.Exit(1) +// } else if st.IsDir() { +// _, _ = fmt.Fprintf(os.Stderr, "Error: replacement value %s is a directory\n", value) +// os.Exit(1) +// } +// } + +// return out +// } + +// readEngines reads the engines from the specified flag and returns a list of engines. +func readEngines() []api.Engine { + if len(specifiedEngines) == 0 { + _, _ = fmt.Fprintf(os.Stderr, "Error: at least one engine must be specified\n\n") + printHelp() + os.Exit(1) + } + + var engines []api.Engine + for _, engineName := range specifiedEngines { + engineName = strings.ToLower(strings.TrimSpace(engineName)) + engineName, engineVersion, _ := strings.Cut(engineName, ":") + + var eng api.Engine + switch engineName { + case "node", "bun": // Note: esbuild doesn't have a "bun" engine (yet), but to future proof we'll alias it to node + eng = api.Engine{Name: api.EngineNode, Version: engineVersion} + case "deno": + eng = api.Engine{Name: api.EngineDeno, Version: engineVersion} + case "rhino": + eng = api.Engine{Name: api.EngineRhino, Version: engineVersion} + default: + _, _ = fmt.Fprintf(os.Stderr, "Error: unknown/unsupported engine %s\n\n", engineName) + printHelp() + os.Exit(1) + } + + engines = append(engines, eng) + } + + return engines +} diff --git a/cli/daemon/apps/apps.go b/cli/daemon/apps/apps.go new file mode 100644 index 0000000000..8169a9f66f --- /dev/null +++ b/cli/daemon/apps/apps.go @@ -0,0 +1,566 @@ +package apps + +import ( + "database/sql" + "io/fs" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/cockroachdb/errors" + "github.com/golang/protobuf/proto" + "github.com/rs/zerolog/log" + "go4.org/syncutil" + + 
"encore.dev/appruntime/exported/experiments" + "encr.dev/cli/internal/manifest" + "encr.dev/internal/conf" + "encr.dev/internal/env" + "encr.dev/internal/goldfish" + "encr.dev/pkg/appfile" + "encr.dev/pkg/fns" + "encr.dev/pkg/watcher" + "encr.dev/pkg/xos" + meta "encr.dev/proto/encore/parser/meta/v1" +) + +var ErrNotFound = errors.New("app not found") + +func NewManager(db *sql.DB) *Manager { + return &Manager{ + db: db, + instances: make(map[string]*Instance), + } +} + +// Manager keeps track of known apps and watches them for changes. +type Manager struct { + db *sql.DB + setupWatch syncutil.Once + + appRegMu sync.Mutex + appListeners []func(*Instance) + + watchMu sync.Mutex + watchers []WatchFunc + + instanceMu sync.Mutex + instances map[string]*Instance // root -> instance +} + +type TrackOption func(*Instance) error + +func WithTutorial(tutorial string) TrackOption { + return func(i *Instance) error { + err := manifest.SetTutorial(i.root, tutorial) + if err != nil { + return errors.Wrap(err, "set tutorial") + } + i.tutorial = tutorial + return nil + } +} + +// Track begins tracking an app, and marks it as updated +// if the app is already tracked. +func (mgr *Manager) Track(appRoot string, options ...TrackOption) (*Instance, error) { + app, err := mgr.resolve(appRoot) + for _, opt := range options { + if err := opt(app); err != nil { + return nil, err + } + } + if err != nil { + return nil, err + } + _, err = mgr.db.Exec(` + INSERT OR REPLACE INTO app (root, local_id, platform_id, updated_at) + VALUES (?, ?, ?, ?) + `, app.root, app.localID, app.PlatformID(), time.Now()) + if err != nil { + return nil, errors.Wrap(err, "update app store") + } + log.Info().Str("app_id", app.PlatformOrLocalID()).Msg("tracking app") + return app, nil +} + +// FindLatestByPlatformID finds the most recently updated app instance with the given platformID. +// If no such app is found it reports an error matching ErrNotFound. 
+func (mgr *Manager) FindLatestByPlatformID(platformID string) (*Instance, error) { + var root string + err := mgr.db.QueryRow(` + SELECT root + FROM app + WHERE platform_id = ? + ORDER BY updated_at DESC + LIMIT 1 + `, platformID).Scan(&root) + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(ErrNotFound) + } else if err != nil { + return nil, errors.Wrap(err, "query app store") + } + + return mgr.resolve(root) +} + +func (mgr *Manager) FindLatestByPlatformOrLocalID(id string) (*Instance, error) { + // Local ID do not contain hyphens, platform ID's always contain hyphens. + if strings.Contains(id, "-") { + return mgr.FindLatestByPlatformID(id) + } + + var root string + err := mgr.db.QueryRow(` + SELECT root + FROM app + WHERE local_id = ? + ORDER BY updated_at DESC + LIMIT 1 + `, id).Scan(&root) + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(ErrNotFound) + } else if err != nil { + return nil, errors.Wrap(err, "query app store") + } + + return mgr.resolve(root) +} + +// List lists all known apps. 
+func (mgr *Manager) List() ([]*Instance, error) {
+	roots, err := mgr.listRoots()
+	if err != nil {
+		return nil, err
+	}
+
+	var apps []*Instance
+	for _, root := range roots {
+		app, err := mgr.resolve(root)
+		if errors.Is(err, fs.ErrNotExist) {
+			log.Debug().Str("root", root).Msg("app no longer exists, skipping")
+			// Delete the stale registration from the app store so it
+			// doesn't show up in future listings.
+			_, _ = mgr.db.Exec(`DELETE FROM app WHERE root = ?`, root)
+			continue
+		} else if err != nil {
+			// Other resolution failures are logged and skipped rather than
+			// failing the whole listing.
+			log.Error().Err(err).Str("root", root).Msg("unable to resolve app")
+			continue
+		}
+		apps = append(apps, app)
+	}
+
+	return apps, nil
+}
+
+// listRoots returns the roots of all apps recorded in the app store.
+func (mgr *Manager) listRoots() ([]string, error) {
+	rows, err := mgr.db.Query(`SELECT root FROM app`)
+	if err != nil {
+		return nil, errors.Wrap(err, "query app roots")
+	}
+	defer fns.CloseIgnore(rows)
+
+	var roots []string
+	for rows.Next() {
+		var root string
+		if err := rows.Scan(&root); err != nil {
+			return nil, errors.Wrap(err, "scan row")
+		}
+		roots = append(roots, root)
+	}
+	err = errors.Wrap(rows.Err(), "iterate rows")
+	return roots, err
+}
+
+// RegisterAppListener registers a callback that gets invoked every time
+// an app is tracked.
+func (mgr *Manager) RegisterAppListener(fn func(*Instance)) {
+	// Hold instanceMu for the duration so no instance can be added
+	// concurrently; fn is therefore called exactly once per existing app.
+	mgr.instanceMu.Lock()
+	defer mgr.instanceMu.Unlock()
+
+	mgr.appRegMu.Lock()
+	mgr.appListeners = append(mgr.appListeners, fn)
+	mgr.appRegMu.Unlock()
+
+	// Call the handler for all existing apps
+	for _, inst := range mgr.instances {
+		fn(inst)
+	}
+}
+
+// WatchFunc is the signature of functions registered as app watchers.
+type WatchFunc func(*Instance, []watcher.Event)
+
+// WatchAll watches all apps for changes.
+func (mgr *Manager) WatchAll(fn WatchFunc) error {
+	err := mgr.setupWatch.Do(func() error {
+		// Begin tracking all known apps by calling List (since it calls resolve).
+		_, err := mgr.List()
+		return err
+	})
+	if err != nil {
+		return err
+	}
+
+	mgr.watchMu.Lock()
+	mgr.watchers = append(mgr.watchers, fn)
+	mgr.watchMu.Unlock()
+	return nil
+}
+
+// onWatchEvent fans out file-change events for instance i to all
+// watchers registered via WatchAll. The watcher slice is copied under
+// watchMu so the callbacks run without holding the lock.
+func (mgr *Manager) onWatchEvent(i *Instance, ev []watcher.Event) {
+	mgr.watchMu.Lock()
+	watchers := mgr.watchers
+	mgr.watchMu.Unlock()
+	for _, fn := range watchers {
+		fn(i, ev)
+	}
+}
+
+// resolve resolves the current information about the app located at appRoot.
+// If the app does not exist (either because appRoot does not exist,
+// or because encore.app does not exist within it), it reports an error
+// matching fs.ErrNotExist.
+func (mgr *Manager) resolve(appRoot string) (*Instance, error) {
+	mgr.instanceMu.Lock()
+	defer mgr.instanceMu.Unlock()
+
+	if existing, ok := mgr.instances[appRoot]; ok {
+		return existing, nil
+	}
+
+	platformID, err := readPlatformID(appRoot)
+	if err != nil {
+		return nil, err
+	}
+
+	// Parse the manifest file
+	man, err := manifest.ReadOrCreate(appRoot)
+	if err != nil {
+		return nil, errors.Wrap(err, "parse manifest")
+	}
+
+	i := NewInstance(appRoot, man.LocalID, platformID)
+	i.tutorial = man.Tutorial
+	i.mgr = mgr
+	// A missing directory is tolerated here; other watch failures are
+	// logged but do not prevent the instance from being registered.
+	if err := i.beginWatch(); err != nil && !errors.Is(err, fs.ErrNotExist) {
+		log.Error().Err(err).Str("id", i.PlatformOrLocalID()).Msg("unable to begin watching app")
+	}
+	mgr.instances[appRoot] = i
+
+	// Notify any listeners about the new app
+	for _, fn := range mgr.appListeners {
+		fn(i)
+	}
+
+	return i, nil
+}
+
+// Close closes all tracked app instances. It always returns nil:
+// per-instance close failures are logged so that every instance
+// still gets a chance to close.
+func (mgr *Manager) Close() error {
+	mgr.instanceMu.Lock()
+	defer mgr.instanceMu.Unlock()
+
+	for _, inst := range mgr.instances {
+		if err := inst.Close(); err != nil {
+			log.Err(err).Str("id", inst.PlatformOrLocalID()).Msg("unable to close app instance")
+			// do not return an error here as we want to close all instances
+		}
+	}
+
+	return nil
+}
+
+// Instance describes an app instance known by the Encore daemon.
+type Instance struct {
+	root       string                  // filesystem path of the app root
+	localID    string                  // random local id from .encore/manifest.json
+	platformID *goldfish.Cache[string] // cached platform id, refreshed via fetchPlatformID
+	tutorial   string
+
+	// mgr is a reference to the manager that created it.
+	// It may be nil if an instance was created without a manager.
+	mgr     *Manager
+	watcher *watcher.Watcher
+
+	setupWatch  syncutil.Once // guards one-time watcher setup in beginWatch
+	watchMu     sync.Mutex
+	nextWatchID WatchSubscriptionID
+	watchers    map[WatchSubscriptionID]*watchSubscription
+
+	mdMu     sync.Mutex
+	cachedMd *meta.Data // in-memory copy of the cached metadata, if loaded
+}
+
+// NewInstance creates an Instance for the app at root.
+// If platformID is non-empty it seeds the platform-id cache;
+// otherwise the id is looked up lazily from the encore.app file.
+func NewInstance(root, localID, platformID string) *Instance {
+	i := &Instance{
+		root:     root,
+		localID:  localID,
+		watchers: make(map[WatchSubscriptionID]*watchSubscription),
+	}
+	i.platformID = goldfish.New[string](1*time.Second, i.fetchPlatformID)
+	if platformID != "" {
+		i.platformID.Set(platformID)
+	}
+	return i
+}
+
+// Tutorial reports the tutorial recorded for this app, if any.
+func (i *Instance) Tutorial() string {
+	return i.tutorial
+}
+
+// Root returns the filesystem path for the app root.
+// It always returns a non-empty string.
+func (i *Instance) Root() string { return i.root }
+
+// LocalID reports a local, random id unique for this app,
+// as persisted in the .encore/manifest.json file.
+// It always returns a non-empty string.
+func (i *Instance) LocalID() string { return i.localID }
+
+// PlatformID reports the Encore Platform's ID for this app.
+// If the app is not linked it reports the empty string.
+func (i *Instance) PlatformID() string {
+	val, _ := i.platformID.Get()
+	return val
+}
+
+// PlatformOrLocalID reports PlatformID() if set and otherwise LocalID().
+func (i *Instance) PlatformOrLocalID() string {
+	if id := i.PlatformID(); id != "" {
+		return id
+	}
+	return i.localID
+}
+
+// Name returns the platform ID for the app, or if there isn't one
+// it returns the folder name the app is in.
+func (i *Instance) Name() string {
+	if id := i.PlatformID(); id != "" {
+		return id
+	}
+
+	return filepath.Base(i.root)
+}
+
+// fetchPlatformID is the refresh function for the platformID cache.
+func (i *Instance) fetchPlatformID() (string, error) {
+	return readPlatformID(i.root)
+}
+
+// readPlatformID reads the app's platform id from the encore.app file
+// in appRoot. A missing file is reported as the os.ReadFile error
+// (matching fs.ErrNotExist).
+func readPlatformID(appRoot string) (string, error) {
+	// Parse the encore.app file
+	path := filepath.Join(appRoot, appfile.Name)
+	data, err := os.ReadFile(path)
+	if err != nil {
+		return "", err
+	}
+	encore, err := appfile.Parse(data)
+	if err != nil {
+		return "", errors.Wrap(err, "parse encore.app")
+	}
+	return encore.ID, nil
+}
+
+// Experiments returns the enabled experiments for this app.
+//
+// Note: we read the app file here instead of a cached value so we
+// can detect changes between runs of the compiler if we're in
+// watch mode.
+func (i *Instance) Experiments(environ []string) (*experiments.Set, error) {
+	exp, err := appfile.Experiments(i.root)
+	if err != nil {
+		return nil, err
+	}
+
+	return experiments.FromAppFileAndEnviron(exp, environ)
+}
+
+// Lang reports the app's language from the encore.app file,
+// deliberately defaulting to Go if the file cannot be parsed.
+func (i *Instance) Lang() appfile.Lang {
+	appFile, err := appfile.ParseFile(filepath.Join(i.root, appfile.Name))
+	if err != nil {
+		return appfile.LangGo
+	}
+	return appFile.Lang
+}
+
+// AppFile parses and returns the app's encore.app file.
+func (i *Instance) AppFile() (*appfile.File, error) {
+	return appfile.ParseFile(filepath.Join(i.root, appfile.Name))
+}
+
+// BuildSettings returns the build settings from the encore.app file.
+func (i *Instance) BuildSettings() (appfile.Build, error) {
+	appFile, err := appfile.ParseFile(filepath.Join(i.root, appfile.Name))
+	if err != nil {
+		return appfile.Build{}, err
+	}
+	return appFile.Build, nil
+}
+
+// GlobalCORS returns the CORS configuration for the app which
+// will be applied against all API gateways into the app
+func (i *Instance) GlobalCORS() (appfile.CORS, error) {
+	cors, err := appfile.GlobalCORS(i.root)
+	if err != nil {
+		return appfile.CORS{}, err
+	}
+
+	// If there are no Global CORS return the default
+	if cors == nil {
+		return appfile.CORS{}, nil
+	}
+
+	return *cors, nil
+
+}
+
+// Watch subscribes fn to file-change events for this app, starting the
+// underlying filesystem watcher on first use. The returned id can be
+// passed to Unwatch to cancel the subscription.
+func (i *Instance) Watch(fn WatchFunc) (WatchSubscriptionID, error) {
+	if err := i.beginWatch(); err != nil {
+		return 0, err
+	}
+
+	i.watchMu.Lock()
+	i.nextWatchID++
+	id := i.nextWatchID
+	i.watchers[id] = &watchSubscription{id, fn}
+	i.watchMu.Unlock()
+	return id, nil
+}
+
+// Unwatch cancels the subscription with the given id, if it exists.
+func (i *Instance) Unwatch(id WatchSubscriptionID) {
+	i.watchMu.Lock()
+	delete(i.watchers, id)
+	i.watchMu.Unlock()
+}
+
+// beginWatch starts the filesystem watcher for the app root (and, in dev
+// mode, the Encore runtimes path) exactly once, and spawns the goroutine
+// that dispatches events to the manager and to per-instance subscribers.
+func (i *Instance) beginWatch() error {
+	return i.setupWatch.Do(func() error {
+		watch, err := watcher.New(i.PlatformOrLocalID())
+		if err != nil {
+			return errors.Wrap(err, "unable to create watcher")
+		}
+		i.watcher = watch
+
+		if err := i.watcher.RecursivelyWatch(i.root); err != nil {
+			return errors.Wrap(err, "unable to watch app")
+		}
+
+		// If we're in dev mode, we want to watch the runtime
+		// too, so we can develop changes to the runtime without
+		// needing to restart the application.
+		if conf.DevDaemon {
+			if err := i.watcher.RecursivelyWatch(env.EncoreRuntimesPath()); err != nil {
+				return errors.Wrap(err, "unable to watch runtime")
+			}
+		}
+
+		go func() {
+			for {
+				events, ok := i.watcher.WaitForEvents()
+				if !ok {
+					// We're done watching.
+					return
+				}
+
+				if i.mgr != nil {
+					i.mgr.onWatchEvent(i, events)
+				}
+
+				// Snapshot the map reference under the lock; the callbacks
+				// themselves run outside it.
+				i.watchMu.Lock()
+				watchers := i.watchers
+				i.watchMu.Unlock()
+				for _, sub := range watchers {
+					sub.f(i, events)
+				}
+			}
+		}()
+
+		return nil
+	})
+}
+
+// CachePath returns the path to the cache directory for this app.
+// It creates the directory if it does not exist.
+func (i *Instance) CachePath() (string, error) {
+	cacheDir, err := conf.CacheDir()
+	if err != nil {
+		return "", errors.Wrap(err, "unable to get encore cache dir")
+	}
+
+	// we use local ID to be stable if the app is linked to the platform later
+	cacheDir = filepath.Join(cacheDir, i.localID)
+	if err := os.MkdirAll(cacheDir, 0755); err != nil {
+		return "", errors.Wrap(err, "unable to create app cache dir")
+	}
+
+	return cacheDir, nil
+}
+
+// CacheMetadata caches the metadata for this app onto the file system
+func (i *Instance) CacheMetadata(md *meta.Data) error {
+	i.mdMu.Lock()
+	defer i.mdMu.Unlock()
+
+	// Keep the in-memory copy in sync with what's written to disk.
+	i.cachedMd = md
+
+	cacheDir, err := i.CachePath()
+	if err != nil {
+		return err
+	}
+
+	data, err := proto.Marshal(md)
+	if err != nil {
+		return errors.Wrap(err, "unable to marshal metadata")
+	}
+
+	err = xos.WriteFile(filepath.Join(cacheDir, "metadata.pb"), data, 0644)
+	if err != nil {
+		return errors.Wrap(err, "unable to write metadata")
+	}
+
+	return nil
+}
+
+// CachedMetadata returns the cached metadata for this app, if any.
+// It returns (nil, nil) when no metadata has been cached yet.
+func (i *Instance) CachedMetadata() (*meta.Data, error) {
+	i.mdMu.Lock()
+	defer i.mdMu.Unlock()
+
+	if i.cachedMd != nil {
+		return i.cachedMd, nil
+	}
+
+	cacheDir, err := i.CachePath()
+	if err != nil {
+		return nil, err
+	}
+
+	data, err := os.ReadFile(filepath.Join(cacheDir, "metadata.pb"))
+	if err != nil {
+		if errors.Is(err, os.ErrNotExist) {
+			// No cached metadata is not an error.
+			return nil, nil
+		}
+		return nil, errors.Wrap(err, "unable to read metadata")
+	}
+
+	md := &meta.Data{}
+	err = proto.Unmarshal(data, md)
+	if err != nil {
+		return nil, errors.Wrap(err, "unable to unmarshal metadata")
+	}
+
+	i.cachedMd = md
+	return md, nil
+}
+
+// Close stops the filesystem watcher for this instance, if one was started.
+func (i *Instance) Close() error {
+	if i.watcher != nil {
+		return i.watcher.Close()
+	}
+	return nil
+}
+
+// WatchSubscriptionID identifies a subscription created by Instance.Watch.
+type WatchSubscriptionID int64
+
+// watchSubscription pairs a subscription id with its callback.
+type watchSubscription struct {
+	id WatchSubscriptionID
+	f  WatchFunc
+}
diff --git a/cli/daemon/check.go b/cli/daemon/check.go
new file mode 100644
index 0000000000..09f306cddd
--- /dev/null
+++ b/cli/daemon/check.go
@@ -0,0 +1,39 @@
+package daemon
+
+import (
+	"encr.dev/cli/daemon/run"
+	daemonpb "encr.dev/proto/encore/daemon"
+)
+
+// Check checks the app for compilation errors.
+// Errors are reported to the client via the stream; the method itself
+// returns nil and signals failure through the stream exit code instead.
+func (s *Server) Check(req *daemonpb.CheckRequest, stream daemonpb.Daemon_CheckServer) error {
+	slog := &streamLog{stream: stream, buffered: false}
+	log := newStreamLogger(slog)
+
+	app, err := s.apps.Track(req.AppRoot)
+	if err != nil {
+		log.Error().Err(err).Msg("failed to resolve app")
+		streamExit(stream, 1)
+		return nil
+	}
+
+	buildDir, err := s.mgr.Check(stream.Context(), run.CheckParams{
+		App:          app,
+		WorkingDir:   req.WorkingDir,
+		CodegenDebug: req.CodegenDebug,
+		Environ:      req.Environ,
+		Tests:        req.ParseTests,
+	})
+
+	exitCode := 0
+	if err != nil {
+		exitCode = 1
+		log.Error().Msg(err.Error())
+	}
+
+	if req.CodegenDebug && buildDir != "" {
+		log.Info().Msgf("wrote generated code to: %s", buildDir)
+	}
+	streamExit(stream, exitCode)
+	return nil
+}
diff --git a/cli/daemon/common.go b/cli/daemon/common.go
new file mode 100644
index 0000000000..0465b5dc32
--- /dev/null
+++ b/cli/daemon/common.go
@@ -0,0 +1,178 @@
+package daemon
+
+import (
+	"io"
+	"net"
+	"os"
+	"runtime"
+	"strconv"
+	"strings"
+	"syscall"
+
+	"github.com/logrusorgru/aurora/v3"
+
+	"encr.dev/cli/daemon/run"
+	"encr.dev/cli/internal/onboarding"
+	"encr.dev/pkg/errlist"
+	meta "encr.dev/proto/encore/parser/meta/v1"
+)
+
+// OnStart implements run.EventListener.
+func (s *Server) OnStart(r *run.Run) {}
+
+// OnCompileStart implements run.EventListener.
+func (s *Server) OnCompileStart(r *run.Run) {}
+
+// OnReload implements run.EventListener.
+func (s *Server) OnReload(r *run.Run) {}
+
+// OnStop implements run.EventListener.
+func (s *Server) OnStop(r *run.Run) {}
+
+// OnStdout implements run.EventListener.
+func (s *Server) OnStdout(r *run.Run, line []byte) {
+	// Look up the command stream for this run under the lock, but write
+	// outside it.
+	s.mu.Lock()
+	slog, ok := s.streams[r.ID]
+	s.mu.Unlock()
+
+	if ok {
+		_, _ = slog.Stdout(true).Write(line)
+	}
+}
+
+// OnStderr implements run.EventListener.
+func (s *Server) OnStderr(r *run.Run, line []byte) {
+	s.mu.Lock()
+	slog, ok := s.streams[r.ID]
+	s.mu.Unlock()
+
+	if ok {
+		_, _ = slog.Stderr(true).Write(line)
+	}
+}
+
+// OnError forwards a compile/run error list to the stream for the run,
+// if one is registered.
+func (s *Server) OnError(r *run.Run, err *errlist.List) {
+	s.mu.Lock()
+	slog, ok := s.streams[r.ID]
+	s.mu.Unlock()
+
+	if ok {
+		slog.Error(err)
+	}
+}
+
+// showFirstRunExperience prints a hint with an example curl command the
+// first time a user runs an app. The hint is only shown once: the
+// onboarding state is persisted after the first display. Among all
+// endpoints it picks the one yielding the longest curl command.
+func showFirstRunExperience(run *run.Run, md *meta.Data, stdout io.Writer) {
+	if state, err := onboarding.Load(); err == nil {
+		if !state.FirstRun.IsSet() {
+			// Is there a suitable endpoint to call?
+			var rpc *meta.RPC
+			var command string
+			for _, svc := range md.Svcs {
+				for _, r := range svc.Rpcs {
+					if cmd := genCurlCommand(run, md, r); rpc == nil || len(command) < len(cmd) {
+						rpc = r
+						command = cmd
+					}
+				}
+			}
+			if rpc != nil {
+				state.FirstRun.Set()
+				// Only show the hint if the state was persisted, so we
+				// never show it more than once.
+				if err := state.Write(); err == nil {
+					_, _ = stdout.Write([]byte(aurora.Sprintf("\nHint: make an API call by running: %s\n", aurora.Cyan(command))))
+				}
+			}
+		}
+	}
+}
+
+// findAvailableAddr attempts to find an available host:port that's near
+// the given startAddr.
+func findAvailableAddr(startAddr string) (host string, port int, ok bool) {
+	// Fall back to localhost:4000 when startAddr cannot be parsed.
+	host, portStr, err := net.SplitHostPort(startAddr)
+	if err != nil {
+		host = "localhost"
+		portStr = "4000"
+	}
+	startPort, err := strconv.Atoi(portStr)
+	if err != nil {
+		startPort = 4000
+	}
+
+	// Probe the next 10 ports after startPort by actually binding them.
+	for p := startPort + 1; p <= startPort+10 && p <= 65535; p++ {
+		addr := host + ":" + strconv.Itoa(p)
+		ln, err := net.Listen("tcp", addr)
+		if err == nil {
+			_ = ln.Close()
+			return host, p, true
+		}
+	}
+	return "", 0, false
+}
+
+// genCurlCommand builds an example curl invocation for the given endpoint,
+// substituting placeholder values for path parameters and (for methods with
+// a body) a generated payload from the request schema.
+func genCurlCommand(run *run.Run, md *meta.Data, rpc *meta.RPC) string {
+	var payload []byte
+	method := rpc.HttpMethods[0]
+	switch method {
+	case "GET", "HEAD", "DELETE":
+		// doesn't use HTTP body payloads
+	default:
+		payload = genSchema(md, rpc.RequestSchema)
+	}
+
+	var segments []string
+	for _, seg := range rpc.Path.Segments {
+		var v string
+		switch seg.Type {
+		default:
+			v = "foo"
+		case meta.PathSegment_LITERAL:
+			v = seg.Value
+		case meta.PathSegment_WILDCARD, meta.PathSegment_FALLBACK:
+			v = "foo"
+		case meta.PathSegment_PARAM:
+			switch seg.ValueType {
+			case meta.PathSegment_STRING:
+				v = "foo"
+			case meta.PathSegment_BOOL:
+				v = "true"
+			case meta.PathSegment_INT8, meta.PathSegment_INT16, meta.PathSegment_INT32, meta.PathSegment_INT64,
+				meta.PathSegment_UINT8, meta.PathSegment_UINT16, meta.PathSegment_UINT32, meta.PathSegment_UINT64:
+				v = "1"
+			case meta.PathSegment_UUID:
+				v = "be23a21f-d12c-432c-91ec-fb8a52e23967" // some random UUID
+			default:
+				v = "foo"
+			}
+		}
+		segments = append(segments, v)
+	}
+
+	parts := []string{"curl"}
+	// Only emit -X when the method differs from curl's default for the
+	// request shape (POST with a body, GET without one).
+	if (payload != nil && method != "POST") || (payload == nil && method != "GET") {
+		parts = append(parts, " -X ", method)
+	}
+	// nosemgrep
+	path := "/" + strings.Join(segments, "/")
+	parts = append(parts, " http://", run.ListenAddr, path)
+	if payload != nil {
+		parts = append(parts, " -d '", string(payload), "'")
+	}
+	return strings.Join(parts, "")
+}
+
+// errIsAddrInUse reports whether the error is due to the address already being in use.
+func errIsAddrInUse(err error) bool {
+	if opErr, ok := err.(*net.OpError); ok {
+		if syscallErr, ok := opErr.Err.(*os.SyscallError); ok {
+			if errno, ok := syscallErr.Err.(syscall.Errno); ok {
+				// Windows reports WSAEADDRINUSE rather than EADDRINUSE.
+				const WSAEADDRINUSE = 10048
+				switch {
+				case errno == syscall.EADDRINUSE:
+					return true
+				case runtime.GOOS == "windows" && errno == WSAEADDRINUSE:
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
diff --git a/cli/daemon/create.go b/cli/daemon/create.go
new file mode 100644
index 0000000000..06b064dd89
--- /dev/null
+++ b/cli/daemon/create.go
@@ -0,0 +1,21 @@
+package daemon
+
+import (
+	"context"
+
+	"encr.dev/cli/daemon/apps"
+	daemonpb "encr.dev/proto/encore/daemon"
+)
+
+// CreateApp adds tracking for a new app
+func (s *Server) CreateApp(ctx context.Context, req *daemonpb.CreateAppRequest) (*daemonpb.CreateAppResponse, error) {
+	var options []apps.TrackOption
+	if req.Tutorial {
+		options = append(options, apps.WithTutorial(req.Template))
+	}
+	app, err := s.apps.Track(req.AppRoot, options...)
+ if err != nil { + return nil, err + } + return &daemonpb.CreateAppResponse{AppId: app.PlatformOrLocalID()}, nil +} diff --git a/cli/daemon/daemon.go b/cli/daemon/daemon.go index 3bc1ad74f6..463ed23e6b 100644 --- a/cli/daemon/daemon.go +++ b/cli/daemon/daemon.go @@ -2,164 +2,228 @@ package daemon import ( + "bytes" "context" - "fmt" "io" + "strings" "sync" + "sync/atomic" + "time" - "encr.dev/cli/daemon/internal/appfile" - "encr.dev/cli/daemon/run" - "encr.dev/cli/daemon/secret" - "encr.dev/cli/daemon/sqldb" - "encr.dev/cli/internal/codegen" - daemonpb "encr.dev/proto/encore/daemon" - meta "encr.dev/proto/encore/parser/meta/v1" - "encr.dev/proto/encore/server/remote" + "github.com/cockroachdb/errors" "github.com/golang/protobuf/ptypes/empty" "github.com/rs/zerolog" + "github.com/rs/zerolog/log" "google.golang.org/genproto/googleapis/rpc/errdetails" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" + + "encr.dev/cli/daemon/apps" + "encr.dev/cli/daemon/mcp" + "encr.dev/cli/daemon/namespace" + "encr.dev/cli/daemon/run" + "encr.dev/cli/daemon/secret" + "encr.dev/cli/daemon/sqldb" + "encr.dev/cli/internal/platform" + "encr.dev/cli/internal/update" + "encr.dev/internal/version" + "encr.dev/pkg/builder" + "encr.dev/pkg/builder/builderimpl" + "encr.dev/pkg/clientgen" + "encr.dev/pkg/clientgen/clientgentypes" + "encr.dev/pkg/errlist" + "encr.dev/pkg/fns" + daemonpb "encr.dev/proto/encore/daemon" + meta "encr.dev/proto/encore/parser/meta/v1" ) var _ daemonpb.DaemonServer = (*Server)(nil) // Server implements daemonpb.DaemonServer. 
type Server struct { - version string - mgr *run.Manager - cm *sqldb.ClusterManager - sm *secret.Manager - rc remote.RemoteClient + apps *apps.Manager + mgr *run.Manager + cm *sqldb.ClusterManager + sm *secret.Manager + ns *namespace.Manager + mcp *mcp.Manager + + mu sync.Mutex + streams map[string]*streamLog // run id -> stream - mu sync.Mutex - streams map[string]daemonpb.Daemon_RunServer // run id -> stream - appRoots map[string]string // cache of app id -> app root + availableVerInit sync.Once + availableVer atomic.Value // string + + appDebounceMu sync.Mutex + appDebouncers map[*apps.Instance]*regenerateCodeDebouncer daemonpb.UnimplementedDaemonServer } // New creates a new Server. -func New(version string, mgr *run.Manager, cm *sqldb.ClusterManager, sm *secret.Manager, rc remote.RemoteClient) *Server { +func New(appsMgr *apps.Manager, mgr *run.Manager, cm *sqldb.ClusterManager, sm *secret.Manager, ns *namespace.Manager, mcp *mcp.Manager) *Server { srv := &Server{ - version: version, - mgr: mgr, - cm: cm, - sm: sm, - rc: rc, - streams: make(map[string]daemonpb.Daemon_RunServer), - appRoots: make(map[string]string), + apps: appsMgr, + mgr: mgr, + cm: cm, + sm: sm, + ns: ns, + mcp: mcp, + streams: make(map[string]*streamLog), + + appDebouncers: make(map[*apps.Instance]*regenerateCodeDebouncer), } + mgr.AddListener(srv) + + // Check immediately for the latest version to avoid blocking 'encore run' + go srv.availableUpdate() + + // Begin watching known apps for changes + go srv.watchApps() + return srv } // GenClient generates a client based on the app's API. 
func (s *Server) GenClient(ctx context.Context, params *daemonpb.GenClientRequest) (*daemonpb.GenClientResponse, error) { var md *meta.Data - if params.EnvName == "local" { + + envName := params.EnvName + if envName == "" { + envName = "local" + } + + if envName == "local" { // Determine the app root - s.mu.Lock() - appRoot, ok := s.appRoots[params.AppId] - s.mu.Unlock() - if !ok { + app, err := s.apps.FindLatestByPlatformOrLocalID(params.AppId) + if errors.Is(err, apps.ErrNotFound) { return nil, status.Errorf(codes.FailedPrecondition, "the app %s must be run locally before generating a client for the 'local' environment.", params.AppId) + } else if err != nil { + return nil, status.Errorf(codes.Internal, "unable to query app info: %v", err) } // Get the app metadata - result, err := s.parseApp(appRoot, ".", false) + expSet, err := app.Experiments(nil) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "failed to parse app experiments: %v", err) + } + + // Parse the app to figure out what infrastructure is needed. 
+ bld := builderimpl.Resolve(app.Lang(), expSet) + defer fns.CloseIgnore(bld) + parse, err := bld.Parse(ctx, builder.ParseParams{ + Build: builder.DefaultBuildInfo(), + App: app, + Experiments: expSet, + WorkingDir: ".", + ParseTests: false, + }) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "failed to parse app metadata: %v", err) } - md = result.Meta + md = parse.Meta + + if err := app.CacheMetadata(md); err != nil { + return nil, status.Errorf(codes.Internal, "failed to cache app metadata: %v", err) + } } else { - meta, err := s.rc.Meta(ctx, &remote.MetaRequest{ - AppSlug: params.AppId, - EnvName: params.EnvName, - }) + meta, err := platform.GetEnvMeta(ctx, params.AppId, envName) if err != nil { - return nil, status.Errorf(status.Code(err), "could not fetch API metadata: %v", err) + if strings.Contains(err.Error(), "env_not_found") || strings.Contains(err.Error(), "env_not_deployed") { + if envName == "@primary" { + return nil, status.Error(codes.NotFound, "You have no deployments of this application.\n\nYou can generate the client for your local code by setting `--env=local`.") + } + return nil, status.Errorf(codes.NotFound, "A deployed environment called `%s` not found.\n\nYou can generate the client for your local code by setting `--env=local`.", envName) + } + return nil, status.Errorf(codes.Unavailable, "could not fetch API metadata: %v", err) } md = meta } - lang := codegen.Lang(params.Lang) - code, err := codegen.Client(lang, params.AppId, md) + lang := clientgen.Lang(params.Lang) + + servicesToGenerate := clientgentypes.NewServiceSet(md, params.Services, params.ExcludedServices) + tagSet := clientgentypes.NewTagSet(params.EndpointTags, params.ExcludedEndpointTags) + opts := clientgentypes.Options{} + if params.OpenapiExcludePrivateEndpoints != nil { + opts.OpenAPIExcludePrivateEndpoints = *params.OpenapiExcludePrivateEndpoints + } + if params.TsSharedTypes != nil { + opts.TSSharedTypes = *params.TsSharedTypes + } + if 
params.TsClientTarget != nil { + opts.TSClientTarget = *params.TsClientTarget + } + code, err := clientgen.Client(lang, params.AppId, md, servicesToGenerate, tagSet, opts) if err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } return &daemonpb.GenClientResponse{Code: code}, nil } -// SetSecret sets a secret key on the encore.dev platform. -func (s *Server) SetSecret(ctx context.Context, req *daemonpb.SetSecretRequest) (*daemonpb.SetSecretResponse, error) { - // Get the app id from the app file - appSlug, err := appfile.Slug(req.AppRoot) - if err != nil { - return nil, status.Errorf(codes.InvalidArgument, err.Error()) - } else if appSlug == "" { - return nil, errNotLinked - } - - resp, err := s.rc.SetSecret(ctx, &remote.SetSecretRequest{ - AppSlug: appSlug, - Key: req.Key, - Value: req.Value, - Type: remote.SetSecretRequest_Type(req.Type), - }) +func (s *Server) SecretsRefresh(ctx context.Context, req *daemonpb.SecretsRefreshRequest) (*daemonpb.SecretsRefreshResponse, error) { + app, err := s.apps.Track(req.AppRoot) if err != nil { return nil, err } - go s.sm.UpdateKey(appSlug, req.Key, req.Value) - return &daemonpb.SetSecretResponse{Created: resp.Created}, nil + s.sm.UpdateKey(app.PlatformID(), req.Key, req.Value) + return &daemonpb.SecretsRefreshResponse{}, nil } // Version reports the daemon version. func (s *Server) Version(context.Context, *empty.Empty) (*daemonpb.VersionResponse, error) { - return &daemonpb.VersionResponse{Version: s.version}, nil -} - -// Logs streams logs from the encore.dev platform. 
-func (s *Server) Logs(params *daemonpb.LogsRequest, stream daemonpb.Daemon_LogsServer) error { - appSlug, err := appfile.Slug(params.AppRoot) + configHash, err := version.ConfigHash() if err != nil { - return status.Errorf(codes.InvalidArgument, err.Error()) - } else if appSlug == "" { - return errNotLinked + return nil, err } - logs, err := s.rc.Logs(stream.Context(), &remote.LogsRequest{ - AppSlug: appSlug, - EnvName: params.EnvName, - }) - if err != nil { - return err - } - for { - msg, err := logs.Recv() - if status.Code(err) == codes.Canceled { - return nil - } else if err != nil { - return err - } - err = stream.Send(&daemonpb.LogsMessage{ - Lines: msg.Lines, - DropNotice: msg.DropNotice, - }) + return &daemonpb.VersionResponse{ + Version: version.Version, + ConfigHash: configHash, + }, nil +} + +// availableUpdate checks for updates to Encore. +// If there is a new version it returns it as a semver string. +func (s *Server) availableUpdate() *update.LatestVersion { + check := func() *update.LatestVersion { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + ver, err := update.Check(ctx) if err != nil { - return err + log.Error().Err(err).Msg("could not check for new encore release") } + return ver } -} -// cacheAppRoot adds the appID -> appRoot mapping to the app root cache. 
-func (s *Server) cacheAppRoot(appID, appRoot string) { - s.mu.Lock() - defer s.mu.Unlock() - s.appRoots[appID] = appRoot + s.availableVerInit.Do(func() { + ver := check() + s.availableVer.Store(ver) + go func() { + for { + time.Sleep(1 * time.Hour) + if ver := check(); ver != nil { + s.availableVer.Store(ver) + } + } + }() + }) + + curr := version.Version + latest := s.availableVer.Load().(*update.LatestVersion) + if latest.IsNewer(curr) { + return latest + } + return nil } +var errDatabaseNotFound = (func() error { + st := status.New(codes.NotFound, "database not found") + return st.Err() +})() + var errNotLinked = (func() error { st, err := status.New(codes.FailedPrecondition, "app not linked").WithDetails( &errdetails.PreconditionFailure{ @@ -181,35 +245,32 @@ type commandStream interface { Send(msg *daemonpb.CommandMessage) error } -func newStreamLogger(stream commandStream) zerolog.Logger { - return zerolog.New(zerolog.ConsoleWriter{Out: zerolog.SyncWriter(streamWriter{stream: stream})}) +func newStreamLogger(slog *streamLog) zerolog.Logger { + return zerolog.New(zerolog.SyncWriter(slog.Stderr(false))).With().Timestamp().Logger() } type streamWriter struct { - stream commandStream + mu *sync.Mutex + sl *streamLog stderr bool // if true write to stderr, otherwise stdout + buffer bool } func (w streamWriter) Write(b []byte) (int, error) { - out := &daemonpb.CommandOutput{} - if w.stderr { - out.Stderr = b - } else { - out.Stdout = b - } - err := w.stream.Send(&daemonpb.CommandMessage{ - Msg: &daemonpb.CommandMessage_Output{ - Output: out, - }, - }) - if err != nil { - return 0, err + w.mu.Lock() + defer w.mu.Unlock() + if w.buffer && w.sl.buffered { + if w.stderr { + return w.sl.writeBuffered(&w.sl.stderr, b) + } else { + return w.sl.writeBuffered(&w.sl.stdout, b) + } } - return len(b), nil + return w.sl.writeStream(w.stderr, b) } func streamExit(stream commandStream, code int) { - stream.Send(&daemonpb.CommandMessage{Msg: &daemonpb.CommandMessage_Exit{ + _ = 
stream.Send(&daemonpb.CommandMessage{Msg: &daemonpb.CommandMessage_Exit{ Exit: &daemonpb.CommandExit{ Code: int32(code), }, @@ -218,31 +279,66 @@ func streamExit(stream commandStream, code int) { type streamLog struct { stream commandStream + mu sync.Mutex + + buffered bool + stdout *bytes.Buffer // lazily allocated + stderr *bytes.Buffer // lazily allocated } -func (log streamLog) Stdout() io.Writer { - return streamWriter{stream: log.stream, stderr: false} +func (log *streamLog) Stdout(buffer bool) io.Writer { + return streamWriter{mu: &log.mu, sl: log, stderr: false, buffer: buffer} } -func (log streamLog) Stderr() io.Writer { - return streamWriter{stream: log.stream, stderr: true} +func (log *streamLog) Stderr(buffer bool) io.Writer { + return streamWriter{mu: &log.mu, sl: log, stderr: true, buffer: buffer} } -type runStreamAdapter struct { - stream daemonpb.Daemon_RunServer +func (log *streamLog) Error(err *errlist.List) { + log.mu.Lock() + defer log.mu.Unlock() + _ = err.SendToStream(log.stream) } -func (a runStreamAdapter) Send(msg *daemonpb.CommandMessage) error { - switch msg := msg.Msg.(type) { - case *daemonpb.CommandMessage_Output: - return a.stream.Send(&daemonpb.RunMessage{ - Msg: &daemonpb.RunMessage_Output{Output: msg.Output}, - }) - case *daemonpb.CommandMessage_Exit: - return a.stream.Send(&daemonpb.RunMessage{ - Msg: &daemonpb.RunMessage_Exit{Exit: msg.Exit}, - }) - default: - panic(fmt.Sprintf("unknown CommandMessage type %T", msg)) +func (log *streamLog) FlushBuffers() { + var stdout, stderr []byte + log.mu.Lock() + defer log.mu.Unlock() + if b := log.stdout; b != nil { + stdout = b.Bytes() + log.stdout = nil + } + if b := log.stderr; b != nil { + stderr = b.Bytes() + log.stderr = nil + } + + _, _ = log.writeStream(false, stderr) + _, _ = log.writeStream(true, stdout) + log.buffered = false +} + +func (log *streamLog) writeBuffered(b **bytes.Buffer, p []byte) (int, error) { + if *b == nil { + *b = &bytes.Buffer{} } + return (*b).Write(p) +} + 
// wrapDoc flattens doc onto a single line (newlines become spaces, surrounding
// whitespace trimmed) and then re-inserts newlines so each line is at most
// width characters when a space is available to break on. A single word longer
// than width is kept intact and the break is placed at the next space after it.
// The wrapping is done in place on the byte slice: spaces chosen as break
// points are overwritten with '\n'.
func wrapDoc(doc string, width int) string {
	flattened := strings.TrimSpace(strings.ReplaceAll(doc, "\n", " "))
	buf := []byte(flattened)
	pos := 0
	for pos+width < len(buf) {
		lineStart := pos
		pos += width
		// Walk back from the width limit to the nearest space in this line.
		for pos > lineStart && buf[pos] != ' ' {
			pos--
		}
		if pos > lineStart {
			buf[pos] = '\n'
		} else {
			// No space within width: the word is too long, so break at the
			// first space after it (or leave the tail unwrapped at the end).
			for pos < len(buf) && buf[pos] != ' ' {
				pos++
			}
			if pos < len(buf) {
				buf[pos] = '\n'
			}
		}
	}
	return string(buf)
}
+type endpointsAssembler struct { + eps map[string]*partialEndpoint +} + +func newEndpointAssembler(existing []Service) *endpointsAssembler { + eas := &endpointsAssembler{ + eps: make(map[string]*partialEndpoint), + } + for _, svc := range existing { + for _, ep := range svc.Endpoints { + key := svc.Name + "." + ep.Name + eas.eps[key] = &partialEndpoint{ + service: svc.Name, + endpoint: ep, + } + } + } + return eas +} + +func (s *endpointsAssembler) upsertEndpoint(e EndpointUpdate) *partialEndpoint { + for _, ep := range s.eps { + if ep.service != e.Service || ep.endpoint.Name != e.Name { + continue + } + if e.Doc != "" { + ep.endpoint.Doc = wrapDoc(e.Doc, 77) + } + if e.Method != "" { + ep.endpoint.Method = e.Method + } + if e.Visibility != "" { + ep.endpoint.Visibility = e.Visibility + } + if len(e.Path) > 0 { + ep.endpoint.Path = e.Path + } + if e.RequestType != "" { + ep.endpoint.RequestType = e.RequestType + ep.upsertType(e.RequestType, "") + } + if e.ResponseType != "" { + ep.endpoint.ResponseType = e.ResponseType + ep.upsertType(e.ResponseType, "") + } + if e.Errors != nil { + ep.endpoint.Errors = fns.Map(e.Errors, func(e string) *Error { + return &Error{Code: e} + }) + } + return ep + } + ep := &partialEndpoint{ + service: e.Service, + endpoint: &Endpoint{ + Name: e.Name, + Doc: wrapDoc(e.Doc, 77), + Method: e.Method, + Visibility: e.Visibility, + Path: e.Path, + RequestType: e.RequestType, + ResponseType: e.ResponseType, + Errors: fns.Map(e.Errors, func(e string) *Error { + return &Error{Code: e} + }), + Language: "GO", + }, + } + s.eps[e.Service+"."+e.Name] = ep + return ep +} + +func (s *endpointsAssembler) endpoint(service, endpoint string) *partialEndpoint { + key := service + "." 
+ endpoint + ep, ok := s.eps[key] + if !ok { + ep := &partialEndpoint{ + service: service, + endpoint: &Endpoint{Name: endpoint}, + } + s.eps[key] = ep + } + return ep +} + +func newEndpointAssemblerHandler(existing []Service, notifier AINotifier, epComplete bool) AINotifier { + epCache := newEndpointAssembler(existing) + var lastEp *partialEndpoint + return func(ctx context.Context, msg *AINotification) error { + var ep *partialEndpoint + msgVal := msg.Value + switch val := msg.Value.(type) { + case TypeUpdate: + ep = epCache.endpoint(val.Service, val.Endpoint) + ep.upsertType(val.Name, val.Doc) + msgVal = ep.notification() + case TypeFieldUpdate: + ep = epCache.endpoint(val.Service, val.Endpoint) + ep.upsertField(val) + msgVal = ep.notification() + case EndpointUpdate: + ep = epCache.upsertEndpoint(val) + msgVal = ep.notification() + case ErrorUpdate: + ep = epCache.endpoint(val.Service, val.Endpoint) + ep.upsertError(val) + msgVal = ep.notification() + case PathParamUpdate: + ep = epCache.endpoint(val.Service, val.Endpoint) + ep.upsertPathParam(val) + msgVal = ep.notification() + } + if epComplete && lastEp != ep { + if lastEp != nil { + msg.Value = struct { + Type string `json:"type"` + Service string `json:"service"` + Endpoint string `json:"endpoint"` + }{"EndpointComplete", lastEp.service, lastEp.endpoint.Name} + if err := notifier(ctx, msg); err != nil || msg.Finished { + return err + } + } + lastEp = ep + } + msg.Value = msgVal + return notifier(ctx, msg) + } +} diff --git a/cli/daemon/dash/ai/client.go b/cli/daemon/dash/ai/client.go new file mode 100644 index 0000000000..18762a4c31 --- /dev/null +++ b/cli/daemon/dash/ai/client.go @@ -0,0 +1,153 @@ +package ai + +import ( + "context" + "fmt" + "time" + + "github.com/cockroachdb/errors" + "github.com/hasura/go-graphql-client" + "github.com/hasura/go-graphql-client/pkg/jsonutil" + "github.com/rs/zerolog/log" + + "encr.dev/internal/conf" +) + +type TaskMessage struct { + Type string `graphql:"__typename"` + + 
ServiceUpdate `graphql:"... on ServiceUpdate"` + TypeUpdate `graphql:"... on TypeUpdate"` + TypeFieldUpdate `graphql:"... on TypeFieldUpdate"` + ErrorUpdate `graphql:"... on ErrorUpdate"` + EndpointUpdate `graphql:"... on EndpointUpdate"` + SessionUpdate `graphql:"... on SessionUpdate"` + TitleUpdate `graphql:"... on TitleUpdate"` + PathParamUpdate `graphql:"... on PathParamUpdate"` +} + +func (u *TaskMessage) GetValue() AIUpdateType { + switch u.Type { + case "ServiceUpdate": + return u.ServiceUpdate + case "TypeUpdate": + return u.TypeUpdate + case "TypeFieldUpdate": + return u.TypeFieldUpdate + case "ErrorUpdate": + return u.ErrorUpdate + case "EndpointUpdate": + return u.EndpointUpdate + case "SessionUpdate": + return u.SessionUpdate + case "TitleUpdate": + return u.TitleUpdate + case "PathParamUpdate": + return u.PathParamUpdate + } + return nil +} + +type AIStreamMessage struct { + Value TaskMessage + Error string + Finished bool +} + +type aiTask struct { + Message *AIStreamMessage `graphql:"result"` +} + +func getClient(errHandler func(err error)) *graphql.SubscriptionClient { + client := graphql.NewSubscriptionClient(conf.WSBaseURL + "/graphql"). + WithRetryTimeout(5 * time.Second). + WithRetryDelay(2 * time.Second). + WithRetryStatusCodes("500-599"). + WithWebSocketOptions( + graphql.WebsocketOptions{ + HTTPClient: conf.AuthClient, + }).WithSyncMode(true) + go func() { + log.Info().Msg("starting ai client") + err := client.Run() + log.Info().Msg("closed ai client") + if err != nil { + errHandler(err) + } + }() + return client +} + +type AITask struct { + SubscriptionID string + client *graphql.SubscriptionClient +} + +func (t *AITask) Stop() error { + return t.client.Unsubscribe(t.SubscriptionID) +} + +// startAITask is a helper function to intitiate an AI query to the encore platform. The query +// should be assembled to stream a 'result' graphql field that is a AIStreamMessage. 
+func startAITask[Query any](ctx context.Context, params map[string]interface{}, notifier AINotifier) (*AITask, error) { + var subId string + var errStrReply = func(error string, code any) error { + log.Error().Msgf("ai error: %s (%v)", error, code) + _ = notifier(ctx, &AINotification{ + SubscriptionID: subId, + Error: &AIError{Message: error, Code: fmt.Sprintf("%v", code)}, + Finished: true, + }) + return graphql.ErrSubscriptionStopped + } + var errReply = func(err error) error { + var graphqlErr graphql.Errors + if errors.As(err, &graphqlErr) { + for _, e := range graphqlErr { + _ = errStrReply(e.Message, e.Extensions["code"]) + } + return graphql.ErrSubscriptionStopped + } + return errStrReply(err.Error(), "") + } + var query Query + client := getClient(func(err error) { _ = errReply(err) }) + subId, err := client.Subscribe(&query, params, func(message []byte, err error) error { + if err != nil { + return errReply(err) + } + var result aiTask + err = jsonutil.UnmarshalGraphQL(message, &result) + if err != nil { + return errReply(err) + } + if result.Message.Error != "" { + return errStrReply(result.Message.Error, "") + } + err = notifier(ctx, &AINotification{ + SubscriptionID: subId, + Value: result.Message.Value.GetValue(), + Finished: result.Message.Finished, + }) + if err != nil { + return errReply(err) + } + return nil + }) + return &AITask{SubscriptionID: subId, client: client}, err +} + +// AINotification is a wrapper around messages and errors from the encore platform ai service +type AINotification struct { + SubscriptionID string `json:"subscriptionId,omitempty"` + Value any `json:"value,omitempty"` + Error *AIError `json:"error,omitempty"` + Finished bool `json:"finished,omitempty"` +} + +type AIError struct { + Message string `json:"message"` + Code string `json:"code"` +} + +type AINotifier func(context.Context, *AINotification) error diff --git a/cli/daemon/dash/ai/codegen.go b/cli/daemon/dash/ai/codegen.go new file mode 100644 index 
0000000000..ab167b5a55 --- /dev/null +++ b/cli/daemon/dash/ai/codegen.go @@ -0,0 +1,418 @@ +package ai + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path" + "path/filepath" + "runtime" + "strings" + + "golang.org/x/exp/maps" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/imports" + + "encr.dev/cli/daemon/apps" + "encr.dev/internal/env" + "encr.dev/pkg/fns" + "encr.dev/pkg/paths" + "encr.dev/v2/codegen/rewrite" + "encr.dev/v2/internals/perr" + "encr.dev/v2/internals/pkginfo" + "encr.dev/v2/parser/apis/api/apienc" + "encr.dev/v2/parser/apis/directive" +) + +const defAuthHandler = `package auth + +import ( + "context" + + "encore.dev/beta/auth" +) + +type Data struct { + Username string +} + +//encore:authhandler +func AuthHandler(ctx context.Context, token string) (auth.UID, *Data, error) { + panic("not yet implemented") +}` + +const ( + PathDocPrefix = "Path Parameters" + ErrDocPrefix = "Errors" +) + +func (p PathSegments) Render() (docPath string, goParams []string) { + var params []string + return "/" + path.Join(fns.Map(p, func(s PathSegment) string { + switch s.Type { + case SegmentTypeLiteral: + return *s.Value + case SegmentTypeParam: + params = append(params, fmt.Sprintf("%s %s", *s.Value, *s.ValueType)) + return fmt.Sprintf(":%s", *s.Value) + case SegmentTypeWildcard: + params = append(params, fmt.Sprintf("%s %s", *s.Value, SegmentValueTypeString)) + return fmt.Sprintf("*%s", *s.Value) + case SegmentTypeFallback: + params = append(params, fmt.Sprintf("%s %s", *s.Value, SegmentValueTypeString)) + return fmt.Sprintf("!%s", *s.Value) + default: + panic(fmt.Sprintf("unknown path segment type: %s", s.Type)) + } + })...), params +} + +func (s *Type) Render() string { + rtn := strings.Builder{} + if s.Doc != "" { + rtn.WriteString(fmtComment(strings.TrimSpace(s.Doc), 0, 1)) + } + rtn.WriteString(fmt.Sprintf("type %s struct {\n", s.Name)) + for i, f := range s.Fields { + if i > 0 { + rtn.WriteString("\n") + } + 
if f.Doc != "" { + rtn.WriteString(fmtComment(strings.TrimSpace(f.Doc), 2, 1)) + } + tags := "" + switch f.Location { + case apienc.Body: + tags = fmt.Sprintf(" `json:\"%s\"`", f.WireName) + case apienc.Query: + tags = fmt.Sprintf(" `query:\"%s\"`", f.WireName) + case apienc.Header: + tags = fmt.Sprintf(" `header:\"%s\"`", f.WireName) + } + rtn.WriteString(fmt.Sprintf(" %s %s%s\n", f.Name, f.Type, tags)) + } + rtn.WriteString("}") + return rtn.String() +} + +func (e *Endpoint) Render() string { + buf := strings.Builder{} + if e.Doc != "" { + buf.WriteString(fmtComment(strings.TrimSpace(e.Doc)+"\n", 0, 1)) + } + buf.WriteString(renderDocList(PathDocPrefix, e.Path)) + buf.WriteString(renderDocList(ErrDocPrefix, e.Errors)) + pathStr, pathParams := e.Path.Render() + params := []string{"ctx context.Context"} + params = append(params, pathParams...) + if e.RequestType != "" { + params = append(params, "req *"+e.RequestType) + } + var rtnParams []string + if e.ResponseType != "" { + rtnParams = append(rtnParams, "*"+e.ResponseType) + } + rtnParams = append(rtnParams, "error") + buf.WriteString(fmtComment("encore:api %s method=%s path=%s", 0, 0, e.Visibility, e.Method, pathStr)) + paramsStr := strings.Join(params, ", ") + rtnParamsStr := strings.Join(rtnParams, ", ") + if len(rtnParams) > 1 { + rtnParamsStr = fmt.Sprintf("(%s)", rtnParamsStr) + } + buf.WriteString(fmt.Sprintf("func %s(%s) %s", e.Name, paramsStr, rtnParamsStr)) + return buf.String() +} + +func indentItem(header, comment string) string { + buf := strings.Builder{} + buf.WriteString(header) + for i, line := range strings.Split(strings.TrimSpace(comment), "\n") { + indent := "" + if i > 0 { + indent = strings.Repeat(" ", len(header)) + } + buf.WriteString(fmt.Sprintf("%s%s\n", indent, line)) + } + return buf.String() +} + +func renderDocList[T interface{ DocItem() (string, string) }](header string, items []T) string { + maxLen := 0 + items = fns.Filter(items, func(p T) bool { + key, val := p.DocItem() + if val 
== "" { + return false + } + maxLen = max(maxLen, len(key)) + return true + }) + buf := strings.Builder{} + for i, item := range items { + if i == 0 { + buf.WriteString(header) + buf.WriteString(":\n") + } + key, value := item.DocItem() + spacing := strings.Repeat(" ", maxLen-len(key)) + itemHeader := fmt.Sprintf(" - %s: %s", key, spacing) + buf.WriteString(indentItem(itemHeader, value)) + } + return fmtComment(buf.String(), 0, 1) +} + +// fmtComment prepends '//' to each line of the given comment and indents it with the given number of spaces. +func fmtComment(comment string, before, after int, args ...any) string { + if comment == "" { + return "" + } + prefix := fmt.Sprintf("%s//%s", strings.Repeat(" ", before), strings.Repeat(" ", after)) + result := prefix + strings.ReplaceAll(comment, "\n", "\n"+prefix) + return fmt.Sprintf(result, args...) + "\n" +} + +// generateSrcFiles generates source files for the given services. +func generateSrcFiles(services []Service, app *apps.Instance) (map[paths.RelSlash]string, error) { + svcPaths, err := newServicePaths(app) + if err != nil { + return nil, err + } + needAuth := fns.Any(fns.FlatMap(services, Service.GetEndpoints), (*Endpoint).Auth) + files := map[paths.RelSlash]string{} + if needAuth { + md, err := app.CachedMetadata() + if err != nil { + return nil, err + } + if md.AuthHandler == nil { + relFile, err := svcPaths.RelFileName("auth", "handler") + if err != nil { + return nil, err + } + file := paths.FS(app.Root()).JoinSlash(relFile) + err = os.MkdirAll(file.Dir().ToIO(), 0755) + if err != nil { + return nil, err + } + files[relFile] = string(defAuthHandler) + } + } + for _, s := range services { + if svcPaths.IsNew(s.Name) { + relFile, err := svcPaths.RelFileName(s.Name, s.Name) + if err != nil { + return nil, err + } + file := paths.FS(app.Root()).JoinSlash(relFile) + err = os.MkdirAll(file.Dir().ToIO(), 0755) + if err != nil { + return nil, err + } + files[relFile] = fmt.Sprintf("%spackage %s\n", 
fmtComment(s.Doc, 0, 1), strings.ToLower(s.Name)) + } + for _, e := range s.Endpoints { + relFile, err := svcPaths.RelFileName(s.Name, e.Name) + if err != nil { + return nil, err + } + filePath := paths.FS(app.Root()).JoinSlash(relFile) + _, content := toSrcFile(filePath, s.Name, e.EndpointSource, e.TypeSource) + files[relFile], err = addMissingFuncBodies(content) + if err != nil { + return nil, err + } + } + } + return files, nil +} + +// addMissingFuncBodies adds a panic statement to functions that are missing a body. +// This is used to generate a valid Go source file when the user has not implemented +// the body of the endpoint functions. +func addMissingFuncBodies(content []byte) (string, error) { + set := token.NewFileSet() + rewriter := rewrite.New(content, 0) + file, err := parser.ParseFile(set, "", content, parser.ParseComments|parser.AllErrors) + if err != nil { + return "", err + } + ast.Inspect(file, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncDecl: + if n.Body != nil { + break + } + rewriter.Insert(n.End()-1, []byte(" {\n panic(\"not yet implemented\")\n}\n")) + } + return true + }) + return string(rewriter.Data()), err +} + +// writeFiles writes the generated source files to disk. +func writeFiles(services []Service, app *apps.Instance) ([]paths.RelSlash, error) { + files, err := generateSrcFiles(services, app) + if err != nil { + return nil, err + } + for fileName, content := range files { + root := paths.FS(app.Root()) + err = os.WriteFile(root.JoinSlash(fileName).ToIO(), []byte(content), 0644) + if err != nil { + return nil, err + } + } + return maps.Keys(files), nil +} + +// toSrcFile wraps a code fragment in a package declaration and adds missing imports +// using the goimports tool. 
+func toSrcFile(filePath paths.FS, svc string, srcs ...string) (offset token.Position, data []byte) { + const divider = "// @code-start\n" + header := fmt.Sprintf("package %s\n\n", strings.ToLower(svc)) + src := []byte(header + divider + strings.Join(srcs, "\n")) + importedSrc, err := imports.Process(filePath.ToIO(), src, &imports.Options{ + Comments: true, + TabIndent: false, + TabWidth: 4, + }) + // We don't need to handle the error here, as we'll catch parser/scanner errors in a later + // phase. This is just a best effort to import missing packages. + if err == nil { + src = importedSrc + } + codeOffset := bytes.Index(src, []byte(divider)) + // Remove the divider and any formatting made by the imports tool + src = append(src[:codeOffset], strings.Join(srcs, "\n")...) + // Compute offset of the user defined code + lines := strings.Split(string(src[:codeOffset]), "\n") + return token.Position{ + Filename: filePath.ToIO(), + Offset: codeOffset, + Line: len(lines) - 1, + Column: 0, + }, src +} + +// updateCode updates the source code fields of the EndpointInputs in the given services. +// if overwrite is set, the code will be regenerated from scratch and replace the existing code, +// otherwise, we'll modify the code in place +func updateCode(ctx context.Context, services []Service, app *apps.Instance, overwrite bool) (rtn *SyncResult, err error) { + overlays, err := newOverlays(app, overwrite, services...) 
+ fset := token.NewFileSet() + perrs := perr.NewList(ctx, fset, overlays.ReadFile) + defer func() { + perr.CatchBailout(recover()) + if rtn == nil { + rtn = &SyncResult{ + Services: services, + } + } + rtn.Errors = overlays.validationErrors(perrs) + }() + for p, olay := range overlays.items { + astFile, err := parser.ParseFile(fset, p.ToIO(), olay.content, parser.ParseComments|parser.AllErrors) + if err != nil { + perrs.AddStd(err) + } + rewriter := rewrite.New(olay.content, int(astFile.FileStart)) + typeByName := map[string]*ast.GenDecl{} + funcByName := map[string]*ast.FuncDecl{} + for _, decl := range astFile.Decls { + switch decl := decl.(type) { + case *ast.GenDecl: + if decl.Tok != token.TYPE { + continue + } + for _, spec := range decl.Specs { + typeSpec := spec.(*ast.TypeSpec) + typeByName[typeSpec.Name.Name] = decl + } + case *ast.FuncDecl: + funcByName[decl.Name.Name] = decl + } + } + if olay.codeType == CodeTypeEndpoint { + funcDecl, ok := funcByName[olay.endpoint.Name] + if !ok { + for _, f := range funcByName { + dir, _, _ := directive.Parse(perrs, f.Doc) + if dir != nil && dir.Name == "api" { + funcDecl = f + break + } + } + } + if funcDecl != nil { + start := funcDecl.Pos() + if funcDecl.Doc != nil { + start = funcDecl.Doc.Pos() + } + end := funcDecl.End() + if funcDecl.Body != nil { + end = funcDecl.Body.Lbrace + } + rewriter.Replace(start, end, []byte(olay.endpoint.Render())) + } else { + if len(funcByName) > 0 { + rewriter.Append([]byte("\n")) + } + rewriter.Append([]byte(olay.endpoint.Render())) + } + olay.content = rewriter.Data() + content := string(olay.content[olay.headerOffset.Offset:]) + olay.endpoint.EndpointSource = strings.TrimSpace(content) + } else { + for _, typ := range olay.endpoint.Types { + typeSpec := typeByName[typ.Name] + code := typ.Render() + if typeSpec != nil { + start := typeSpec.Pos() + if typeSpec.Doc != nil { + start = typeSpec.Doc.Pos() + } + rewriter.Replace(start, typeSpec.End(), []byte(code)) + } else { + 
rewriter.Append([]byte("\n\n" + code)) + } + } + olay.content = rewriter.Data() + content := string(olay.content[olay.headerOffset.Offset:]) + olay.endpoint.TypeSource = strings.TrimSpace(content) + } + } + goRoot := paths.RootedFSPath(env.EncoreGoRoot(), ".") + + // Parse the end result to catch any syntax errors + pkginfo.UpdateGoPath(goRoot) + pkgs, err := packages.Load(&packages.Config{ + Mode: packages.NeedTypes | packages.NeedSyntax, + Dir: app.Root(), + Env: append(os.Environ(), + "GOOS="+runtime.GOOS, + "GOARCH="+runtime.GOARCH, + "GOROOT="+goRoot.ToIO(), + "PATH="+goRoot.Join("bin").ToIO()+string(filepath.ListSeparator)+os.Getenv("PATH"), + ), + Fset: fset, + Overlay: overlays.PkgOverlay(), + }, fns.Map(overlays.pkgPaths(), paths.Pkg.String)...) + if err != nil { + return nil, err + } + for _, pkg := range pkgs { + for _, err := range pkg.Errors { + // ignore missing function bodies error (it's allowed) + if strings.Contains(err.Error(), "missing function body") { + continue + } + perrs.AddStd(err) + } + } + return &SyncResult{ + Services: services, + }, nil +} diff --git a/cli/daemon/dash/ai/conv.go b/cli/daemon/dash/ai/conv.go new file mode 100644 index 0000000000..e149fd8ea0 --- /dev/null +++ b/cli/daemon/dash/ai/conv.go @@ -0,0 +1,142 @@ +package ai + +import ( + "slices" + "strings" + + "encr.dev/pkg/clientgen" + meta "encr.dev/proto/encore/parser/meta/v1" + schema "encr.dev/proto/encore/parser/schema/v1" + "encr.dev/v2/internals/resourcepaths" +) + +func toPathSegments(p *resourcepaths.Path, docs map[string]string) []PathSegment { + rtn := make([]PathSegment, 0, len(p.Segments)) + for _, s := range p.Segments { + switch s.Type { + case resourcepaths.Literal: + rtn = append(rtn, PathSegment{Type: SegmentTypeLiteral, Value: ptr(s.Value)}) + case resourcepaths.Param: + rtn = append(rtn, PathSegment{ + Type: SegmentTypeParam, + Value: ptr(s.Value), + ValueType: ptr(SegmentValueType(strings.ToLower(s.ValueType.String()))), + Doc: docs[s.Value], + }) + 
case resourcepaths.Wildcard: + rtn = append(rtn, PathSegment{ + Type: SegmentTypeWildcard, + Value: ptr(s.Value), + ValueType: ptr(SegmentValueType(strings.ToLower(s.ValueType.String()))), + Doc: docs[s.Value], + }) + case resourcepaths.Fallback: + rtn = append(rtn, PathSegment{ + Type: SegmentTypeFallback, + Value: ptr(s.Value), + ValueType: ptr(SegmentValueType(strings.ToLower(s.ValueType.String()))), + Doc: docs[s.Value], + }) + } + } + return rtn +} + +func metaPathToPathSegments(metaPath *meta.Path) []PathSegment { + var segments []PathSegment + for _, seg := range metaPath.Segments { + segments = append(segments, PathSegment{ + Type: toSegmentType(seg.Type), + Value: ptr(seg.Value), + ValueType: ptr(toSegmentValueType(seg.ValueType)), + }) + } + return segments +} + +func toSegmentValueType(valueType meta.PathSegment_ParamType) SegmentValueType { + switch valueType { + case meta.PathSegment_UUID: + return "string" + default: + return SegmentValueType(strings.ToLower(valueType.String())) + } +} + +func toSegmentType(segmentType meta.PathSegment_SegmentType) SegmentType { + switch segmentType { + case meta.PathSegment_LITERAL: + return SegmentTypeLiteral + case meta.PathSegment_PARAM: + return SegmentTypeParam + case meta.PathSegment_WILDCARD: + return SegmentTypeWildcard + case meta.PathSegment_FALLBACK: + return SegmentTypeFallback + default: + panic("unknown segment type") + } +} + +func toVisibility(accessType meta.RPC_AccessType) VisibilityType { + switch accessType { + case meta.RPC_PUBLIC: + return VisibilityTypePublic + case meta.RPC_PRIVATE: + return VisibilityTypePrivate + case meta.RPC_AUTH: + return "" + default: + panic("unknown access type") + } +} + +func renderTypesFromMetadata(md *meta.Data, svcs ...string) string { + var types []*schema.Decl + for _, metaSvc := range md.Svcs { + if len(svcs) > 0 && !slices.Contains(svcs, metaSvc.Name) { + continue + } + for _, rpc := range metaSvc.Rpcs { + if rpc.RequestSchema != nil { + types = append(types, 
md.Decls[rpc.RequestSchema.GetNamed().Id]) + } + if rpc.ResponseSchema != nil { + types = append(types, md.Decls[rpc.ResponseSchema.GetNamed().Id]) + } + } + } + src, _ := clientgen.GenTypes(md, types...) + return string(src) +} + +func parseServicesFromMetadata(md *meta.Data, svcs ...string) []ServiceInput { + services := []ServiceInput{} + for _, metaSvc := range md.Svcs { + if len(svcs) > 0 && !slices.Contains(svcs, metaSvc.Name) { + continue + } + svc := ServiceInput{ + Name: metaSvc.Name, + } + for _, rpc := range metaSvc.Rpcs { + ep := &Endpoint{ + Name: rpc.Name, + Method: rpc.HttpMethods[0], + Visibility: toVisibility(rpc.AccessType), + Path: metaPathToPathSegments(rpc.Path), + } + if rpc.RequestSchema != nil { + decl := md.Decls[rpc.RequestSchema.GetNamed().Id] + ep.RequestType = decl.Name + } + if rpc.ResponseSchema != nil { + decl := md.Decls[rpc.ResponseSchema.GetNamed().Id] + ep.ResponseType = decl.Name + } + svc.Endpoints = append(svc.Endpoints, ep) + } + services = append(services, svc) + } + return services +} diff --git a/cli/daemon/dash/ai/manager.go b/cli/daemon/dash/ai/manager.go new file mode 100644 index 0000000000..3d740a457d --- /dev/null +++ b/cli/daemon/dash/ai/manager.go @@ -0,0 +1,91 @@ +package ai + +import ( + "context" + + "encr.dev/cli/daemon/apps" + "encr.dev/pkg/fns" + "encr.dev/pkg/paths" + meta "encr.dev/proto/encore/parser/meta/v1" +) + +var ErrorCodeMap = map[string]int64{ + "ai_task_limit_reached": 100, +} + +// Manager exposes the ai functionality to the local dashboard +type Manager struct{} + +func NewAIManager() *Manager { + return &Manager{} +} + +func (m *Manager) DefineEndpoints(ctx context.Context, appSlug string, sessionID AISessionID, prompt string, md *meta.Data, proposed []Service, notifier AINotifier) (*AITask, error) { + svcs := fns.Map(proposed, Service.GetName) + return startAITask[struct { + Message *AIStreamMessage `graphql:"result: defineEndpoints(appSlug: $appSlug, sessionID: $sessionID, prompt: $prompt, 
current: $current, proposedDesign: $proposedDesign, existingTypes: $existingTypes)"` + }](ctx, map[string]interface{}{ + "appSlug": appSlug, + "prompt": prompt, + "current": parseServicesFromMetadata(md, svcs...), + "proposedDesign": fns.Map(proposed, Service.GraphQL), + "sessionID": sessionID, + "existingTypes": renderTypesFromMetadata(md, svcs...), + }, newEndpointAssemblerHandler(proposed, notifier, true)) +} + +func (m *Manager) ProposeSystemDesign(ctx context.Context, appSlug, prompt string, md *meta.Data, notifier AINotifier) (*AITask, error) { + return startAITask[struct { + Message *AIStreamMessage `graphql:"result: proposeSystemDesign(appSlug: $appSlug, prompt: $prompt, current: $current)"` + }](ctx, map[string]interface{}{ + "appSlug": appSlug, + "prompt": prompt, + "current": parseServicesFromMetadata(md), + }, newEndpointAssemblerHandler(nil, notifier, false)) +} + +func (m *Manager) ModifySystemDesign(ctx context.Context, appSlug string, sessionID AISessionID, originalPrompt string, proposed []Service, newPrompt string, md *meta.Data, notifier AINotifier) (*AITask, error) { + return startAITask[struct { + Message *AIStreamMessage `graphql:"result: modifySystemDesign(appSlug: $appSlug, sessionID: $sessionID, originalPrompt: $originalPrompt, proposedDesign: $proposedDesign, newPrompt: $newPrompt, current: $current)"` + }](ctx, map[string]interface{}{ + "appSlug": appSlug, + "originalPrompt": originalPrompt, + "proposedDesign": fns.Map(proposed, Service.GraphQL), + "current": parseServicesFromMetadata(md), + "newPrompt": newPrompt, + "sessionID": sessionID, + }, newEndpointAssemblerHandler(proposed, notifier, false)) +} + +func (m *Manager) ParseCode(ctx context.Context, services []Service, app *apps.Instance) (*SyncResult, error) { + return parseCode(ctx, app, services) +} + +func (m *Manager) UpdateCode(ctx context.Context, services []Service, app *apps.Instance, overwrite bool) (*SyncResult, error) { + return updateCode(ctx, services, app, overwrite) 
+} + +type WriteFilesResponse struct { + FilesPaths []paths.RelSlash `json:"paths"` +} + +func (m *Manager) WriteFiles(ctx context.Context, services []Service, app *apps.Instance) (*WriteFilesResponse, error) { + files, err := writeFiles(services, app) + return &WriteFilesResponse{FilesPaths: files}, err +} + +type PreviewFile struct { + Path paths.RelSlash `json:"path"` + Content string `json:"content"` +} + +type PreviewFilesResponse struct { + Files []PreviewFile `json:"files"` +} + +func (m *Manager) PreviewFiles(ctx context.Context, services []Service, app *apps.Instance) (*PreviewFilesResponse, error) { + files, err := generateSrcFiles(services, app) + return &PreviewFilesResponse{Files: fns.TransformMapToSlice(files, func(k paths.RelSlash, v string) PreviewFile { + return PreviewFile{Path: k, Content: v} + })}, err +} diff --git a/cli/daemon/dash/ai/overlay.go b/cli/daemon/dash/ai/overlay.go new file mode 100644 index 0000000000..8976445522 --- /dev/null +++ b/cli/daemon/dash/ai/overlay.go @@ -0,0 +1,347 @@ +package ai + +import ( + "bytes" + "fmt" + "go/token" + "io" + "os" + "strings" + "time" + + "golang.org/x/exp/maps" + + "encr.dev/cli/daemon/apps" + "encr.dev/pkg/errinsrc" + "encr.dev/pkg/fns" + "encr.dev/pkg/idents" + "encr.dev/pkg/paths" + meta "encr.dev/proto/encore/parser/meta/v1" + "encr.dev/v2/internals/parsectx" + "encr.dev/v2/internals/perr" +) + +// servicePaths is a helper struct to manage mapping between service names, pkg paths and filepaths +// It's created by parsing the metadata of the app +type servicePaths struct { + relPaths map[string]paths.RelSlash + root paths.FS + module paths.Mod +} + +func (s *servicePaths) IsNew(svc string) bool { + _, ok := s.relPaths[svc] + return !ok +} + +func (s *servicePaths) Add(svc string, path paths.RelSlash) *servicePaths { + s.relPaths[svc] = path + return s +} + +func (s *servicePaths) PkgPath(svc string) paths.Pkg { + rel := s.RelPath(svc) + return s.module.Pkg(rel) +} + +func (s *servicePaths) 
FullPath(svc string) paths.FS { + rel := s.RelPath(svc) + return s.root.JoinSlash(rel) +} + +func (s *servicePaths) RelPath(svc string) paths.RelSlash { + pkgName, ok := s.relPaths[svc] + if !ok { + pkgName = paths.RelSlash(strings.ToLower(svc)) + } + return pkgName +} + +func (s *servicePaths) FileName(svc, name string) (paths.FS, error) { + relPath, err := s.RelFileName(svc, name) + if err != nil { + return "", err + } + return s.root.JoinSlash(relPath), nil +} + +func (s *servicePaths) RelFileName(svc, name string) (paths.RelSlash, error) { + pkgPath := s.FullPath(svc) + name = idents.Convert(name, idents.SnakeCase) + fileName := name + ".go" + var i int + for { + fspath := pkgPath.Join(fileName) + if _, err := os.Stat(fspath.ToIO()); os.IsNotExist(err) { + return s.RelPath(svc).Join(fileName), nil + } else if err != nil { + return "", err + } + i++ + fileName = fmt.Sprintf("%s_%d.go", name, i) + } +} + +func newServicePaths(app *apps.Instance) (*servicePaths, error) { + md, err := app.CachedMetadata() + if err != nil { + return nil, err + } + pkgRelPath := fns.ToMap(md.Pkgs, func(p *meta.Package) string { return p.RelPath }) + svcPaths := &servicePaths{ + relPaths: map[string]paths.RelSlash{}, + root: paths.FS(app.Root()), + module: paths.Mod(md.ModulePath), + } + for _, svc := range md.Svcs { + if pkgRelPath[svc.RelPath] != nil { + svcPaths.Add(svc.Name, paths.RelSlash(pkgRelPath[svc.RelPath].RelPath)) + } + } + return svcPaths, nil +} + +// An overlay is a virtual file that is used to store the source code of an endpoint or types +// It automatically generates a header with pkg name and imports. 
+// It implements os.FileInfo and os.DirEntry interfaces +type overlay struct { + path paths.FS + endpoint *Endpoint + service *Service + codeType CodeType + content []byte + headerOffset token.Position +} + +func (o *overlay) Type() os.FileMode { + return o.Mode() +} + +func (o *overlay) Info() (os.FileInfo, error) { + return o, nil +} + +func (o *overlay) Name() string { + return o.path.Base() +} + +func (o *overlay) Size() int64 { + return int64(len(o.content)) +} + +func (o *overlay) Mode() os.FileMode { + return os.ModePerm +} + +func (o *overlay) ModTime() time.Time { + return time.Now() +} + +func (o *overlay) IsDir() bool { + return false +} + +func (o *overlay) Sys() any { + //TODO implement me + panic("implement me") +} + +func (o *overlay) Stat() (os.FileInfo, error) { + return o, nil +} + +func (o *overlay) Reader() io.ReadCloser { + return &overlayReader{o, bytes.NewReader(o.content)} +} + +// overlayReader is a wrapper around the overlay to implement io.ReadCloser +type overlayReader struct { + *overlay + *bytes.Reader +} + +func (o *overlayReader) Close() error { return nil } + +var ( + _ os.FileInfo = (*overlay)(nil) + _ os.DirEntry = (*overlay)(nil) +) + +func newOverlays(app *apps.Instance, overwrite bool, services ...Service) (*overlays, error) { + svcPaths, err := newServicePaths(app) + if err != nil { + return nil, err + } + o := &overlays{ + items: map[paths.FS]*overlay{}, + paths: svcPaths, + } + for _, s := range services { + for _, e := range s.Endpoints { + if overwrite { + e.TypeSource = "" + e.EndpointSource = "" + } + if err := o.add(s, e); err != nil { + return nil, err + } + } + } + return o, nil +} + +// overlays is a collection of virtual files that are used to store the source code of endpoints and types +// in memory. It's modelled as a replacement for the os package. 
+type overlays struct { + items map[paths.FS]*overlay + paths *servicePaths +} + +func (o *overlays) Stat(name string) (os.FileInfo, error) { + f, ok := o.items[paths.FS(name)] + if !ok { + // else return the filesystem file + return os.Stat(name) + } + return f, nil +} + +func (o *overlays) ReadDir(name string) ([]os.DirEntry, error) { + entries := map[string]os.DirEntry{} + osFiles, err := os.ReadDir(name) + for _, f := range osFiles { + entries[f.Name()] = f + } + dir := paths.FS(name) + for _, info := range o.items { + if dir == info.path.Dir() { + entries[info.path.Base()] = info + } + } + if len(entries) == 0 && err != nil { + return nil, err + } + return maps.Values(entries), nil +} + +func (o *overlays) PkgOverlay() map[string][]byte { + files := map[string][]byte{} + for f, info := range o.items { + files[f.ToIO()] = info.content + } + return files +} + +func (o *overlays) ReadFile(name string) ([]byte, error) { + f, ok := o.items[paths.FS(name)] + if !ok { + // else return the filesystem file + return os.ReadFile(name) + } + return f.content, nil +} + +func (o *overlays) Open(name string) (io.ReadCloser, error) { + f, ok := o.items[paths.FS(name)] + if !ok { + // else return the filesystem file + return os.Open(name) + } + return f.Reader(), nil +} + +func (o *overlays) pkgPaths() []paths.Pkg { + pkgs := map[paths.Pkg]struct{}{} + for _, info := range o.items { + pkgs[o.paths.PkgPath(info.service.Name)] = struct{}{} + } + return maps.Keys(pkgs) +} + +func (o *overlays) get(p paths.FS) (*overlay, bool) { + rtn, ok := o.items[p] + return rtn, ok +} + +// validationErrors converts a perr.List into a slice of ValidationErrors +func (o *overlays) validationErrors(list *perr.List) []ValidationError { + var rtn []ValidationError + for i := 0; i < list.Len(); i++ { + err := list.At(i) + rtn = append(rtn, o.validationError(err)...) 
+	}
+	return rtn
+}
+
+// validationError translates errinsrc.ErrInSrc into a ValidationError, which is a
+// simplified error used for displaying errors in the dashboard.
+//
+// If the error carries no source locations, only the summary is returned. Otherwise
+// one ValidationError is emitted per location. Locations that resolve to one of the
+// in-memory overlay files are attributed to that overlay's service/endpoint, and their
+// positions are shifted by the overlay's headerOffset so they point into the user's
+// original code snippet rather than the generated file header.
+func (o *overlays) validationError(err *errinsrc.ErrInSrc) []ValidationError {
+	if err.Params.Locations == nil {
+		// No position information: return just the summary.
+		return []ValidationError{{
+			Message: err.Params.Summary,
+		}}
+	}
+	var rtn []ValidationError
+	for _, loc := range err.Params.Locations {
+		// NOTE: o is deliberately re-declared here, shadowing the receiver with the
+		// overlay matched for this location (the receiver is not used again below).
+		o, ok := o.get(paths.FS(loc.File.FullPath))
+		if !ok {
+			// The location is outside our overlays (e.g. pre-existing app code);
+			// keep the message but drop the position, as it can't be mapped back.
+			rtn = append(rtn, ValidationError{
+				Message: err.Params.Summary,
+			})
+			continue
+		}
+		rtn = append(rtn, ValidationError{
+			Service:  o.service.ID,
+			Endpoint: o.endpoint.ID,
+			CodeType: o.codeType,
+			Message:  err.Params.Summary,
+			// Subtract the generated header's position so line/column are relative
+			// to the user's snippet, not the synthesized source file.
+			Start: &Pos{
+				Line:   loc.Start.Line - o.headerOffset.Line,
+				Column: loc.Start.Col - o.headerOffset.Column,
+			},
+			End: &Pos{
+				Line:   loc.End.Line - o.headerOffset.Line,
+				Column: loc.End.Col - o.headerOffset.Column,
+			},
+		})
+	}
+	return rtn
+}
+
+// add creates new overlays for an endpoint and its types.
+// We create separate overlays for each endpoint and its types to allow for easier parsing and code generation.
+func (o *overlays) add(s Service, e *Endpoint) error {
+	// Overlay 1: the endpoint implementation ("<name>_func.go").
+	p, err := o.paths.FileName(s.Name, e.Name+"_func")
+	if err != nil {
+		return err
+	}
+	// toSrcFile (defined elsewhere in this package) wraps the raw snippet in a
+	// synthetic source file (package clause etc.); offset marks where the user's
+	// code begins within the generated content — TODO(review) confirm.
+	offset, content := toSrcFile(p, s.Name, e.EndpointSource)
+	// Normalize the stored source to exclude the generated header.
+	e.EndpointSource = string(content[offset.Offset:])
+	o.items[p] = &overlay{
+		path:         p,
+		endpoint:     e,
+		service:      &s,
+		codeType:     CodeTypeEndpoint,
+		content:      content,
+		headerOffset: offset,
+	}
+	// Overlay 2: the endpoint's type declarations ("<name>_types.go").
+	p, err = o.paths.FileName(s.Name, e.Name+"_types")
+	if err != nil {
+		return err
+	}
+	offset, content = toSrcFile(p, s.Name, e.TypeSource)
+	e.TypeSource = string(content[offset.Offset:])
+	// Note: both overlays share &s, which is this call's copy of the Service value.
+	o.items[p] = &overlay{
+		path:         p,
+		endpoint:     e,
+		service:      &s,
+		codeType:     CodeTypeTypes,
+		content:      content,
+		headerOffset: offset,
+	}
+	return nil
+}
+
+// Compile-time check that overlays satisfies the parser's overlaid FS interface.
+var (
+	_ parsectx.OverlaidOSFS = (*overlays)(nil)
+)
diff --git a/cli/daemon/dash/ai/parser.go b/cli/daemon/dash/ai/parser.go
new file mode 100644
index 0000000000..331a0bd50a
--- /dev/null
+++ b/cli/daemon/dash/ai/parser.go
@@ -0,0 +1,315 @@
+package ai
+
+import (
+	"context"
+	"go/ast"
+	"go/token"
+	"runtime"
+	"slices"
+	"strings"
+
+	"github.com/rs/zerolog"
+
+	"encr.dev/cli/daemon/apps"
+	"encr.dev/internal/env"
+	"encr.dev/pkg/fns"
+	"encr.dev/pkg/paths"
+	"encr.dev/v2/internals/parsectx"
+	"encr.dev/v2/internals/perr"
+	"encr.dev/v2/internals/pkginfo"
+	"encr.dev/v2/internals/schema"
+	"encr.dev/v2/parser/apis"
+	"encr.dev/v2/parser/apis/api"
+	"encr.dev/v2/parser/apis/api/apienc"
+	"encr.dev/v2/parser/resource/resourceparser"
+)
+
+// parseErrorList parses a list of error docs from a doc string.
+// It returns the doc string with the error section removed, plus the
+// parsed errors (one per "- Code: description" item).
+func parseErrorList(doc string) (string, []*Error) {
+	doc, errs := parseDocList(doc, ErrDocPrefix)
+	return doc, fns.Map(errs, func(e docListItem) *Error {
+		return &Error{
+			Code: e.Key,
+			Doc:  e.Doc,
+		}
+	})
+}
+
+// parsePathList parses a list of path docs from a doc string.
+func parsePathList(doc string) (string, map[string]string) {
+	// Reuse the generic doc-list parser, keyed by path parameter name.
+	doc, docs := parseDocList(doc, PathDocPrefix)
+	rtn := map[string]string{}
+	for _, d := range docs {
+		rtn[d.Key] = d.Doc
+	}
+	return doc, rtn
+}
+
+// parseDocList parses a list of key-value pairs from a doc string.
+// e.g.
+//
+//	Errors:
+//	- NotFound: The requested resource was not found.
+//	- InvalidArgument: The request had invalid arguments.
+//
+// It returns the doc string truncated before the section header, plus the
+// parsed items. Continuation lines (no "key:" prefix) are appended to the
+// previous item's doc.
+func parseDocList(doc, section string) (string, []docListItem) {
+	var errs []docListItem
+	lines := strings.Split(doc, "\n")
+	start := -1
+	end := -1
+scan:
+	for i, line := range lines {
+		end = i
+		if strings.HasPrefix(strings.TrimSpace(line), section+":") {
+			start = i
+		} else if start == -1 {
+			// Before the section header: leave the line untouched.
+			continue
+		} else if len(line) > 2 {
+			switch strings.TrimSpace(line[:2]) {
+			case "-", "":
+				// Still inside the list (item or indented continuation).
+			default:
+				// First non-list line after the section: the list has ended.
+				// BUGFIX: a bare `break` here only exits the switch, not the
+				// loop, so `end` kept advancing and trailing prose lines were
+				// folded into the parsed items. Use a labeled break instead.
+				end = i - 1
+				break scan
+			}
+		}
+		lines[i] = strings.TrimSpace(line)
+		// Two consecutive blank lines terminate the list early.
+		if line == "" && lines[i-1] == "" {
+			break
+		}
+	}
+	if start == -1 {
+		// Section not found: return the doc unchanged.
+		return doc, errs
+	}
+
+	for _, line := range lines[start+1 : end+1] {
+		key, doc, ok := strings.Cut(line, ":")
+		key = strings.TrimPrefix(key, "-")
+		key = strings.TrimSpace(key)
+		if ok {
+			errs = append(errs, docListItem{
+				Key: key,
+				Doc: strings.TrimSpace(doc),
+			})
+		} else if len(errs) > 0 && line != "" {
+			// Continuation line: append to the previous item's doc.
+			errs[len(errs)-1].Doc += "\n" + line
+		}
+	}
+	return strings.Join(lines[:start], "\n"), errs
+}
+
+// docListItem represents a key-value pair in a doc list.
+type docListItem struct {
+	Key string
+	Doc string
+}
+
+// deref returns the underlying type of a (possibly multiply-nested) pointer type.
+func deref(p schema.Type) schema.Type {
+	for {
+		if pt, ok := p.(schema.PointerType); ok {
+			p = pt.Elem
+		} else {
+			return p
+		}
+	}
+}
+
+// parseCode updates the structured EndpointInput data based on the code in
+// EndpointInput.TypeSource and EndpointInput.EndpointSource fields.
+func parseCode(ctx context.Context, app *apps.Instance, services []Service) (rtn *SyncResult, err error) { + // assamble an overlay with all our newly defined endpoints + overlays, err := newOverlays(app, false, services...) + if err != nil { + return nil, err + } + + fs := token.NewFileSet() + errs := perr.NewList(ctx, fs, overlays.ReadFile) + rootDir := paths.RootedFSPath(app.Root(), ".") + pc := &parsectx.Context{ + Ctx: ctx, + Log: zerolog.Logger{}, + Build: parsectx.BuildInfo{ + Experiments: nil, + GOROOT: paths.RootedFSPath(env.EncoreGoRoot(), "."), + GOARCH: runtime.GOARCH, + GOOS: runtime.GOOS, + }, + MainModuleDir: rootDir, + FS: fs, + ParseTests: false, + Errs: errs, + Overlay: overlays, + } + + // Catch parser bailouts and convert them to ValidationErrors + defer func() { + perr.CatchBailout(recover()) + if rtn == nil { + rtn = &SyncResult{ + Services: services, + } + } + rtn.Errors = overlays.validationErrors(errs) + }() + + // Load overlay packages using the encore loader + loader := pkginfo.New(pc) + pkgs := map[paths.Pkg]*pkginfo.Package{} + for _, pkgPath := range overlays.pkgPaths() { + pkg, ok := loader.LoadPkg(token.NoPos, pkgPath) + if ok { + pkgs[pkgPath] = pkg + } + } + + // Create a schema parser to help us parse the types + schemaParser := schema.NewParser(pc, loader) + + for _, pkg := range pkgs { + // Use the API parser to parser the endpoints for each overlaid package + pass := &resourceparser.Pass{ + Context: pc, + SchemaParser: schemaParser, + Pkg: pkg, + } + apis.Parser.Run(pass) + for _, r := range pass.Resources() { + switch r := r.(type) { + case *api.Endpoint: + // We're only interested in endpoints that are in our overlays + overlay, ok := overlays.get(r.File.FSPath) + if !ok { + continue + } + e := overlay.endpoint + + pathDocs := map[string]string{} + e.Doc, e.Errors = parseErrorList(r.Doc) + e.Doc, pathDocs = parsePathList(e.Doc) + e.Name = r.Name + e.Method = r.HTTPMethods[0] + e.Visibility = VisibilityType(r.Access) + 
e.Language = "GO" + e.Path = toPathSegments(r.Path, pathDocs) + + // Clear the types as we will reparse them + e.Types = []*Type{} + if nr, ok := deref(r.Request).(schema.NamedType); ok { + e.RequestType = nr.String() + // If the request type is in the overlays, we should parse it and + // add it to the endpoint associated with the overlay + ov, ok := overlays.get(nr.DeclInfo.File.FSPath) + if len(r.RequestEncoding()) > 0 && ok { + e = ov.endpoint + e.Types = append(e.Types, &Type{ + Name: nr.String(), + Doc: strings.TrimSpace(nr.DeclInfo.Doc), + Fields: fns.Map(r.RequestEncoding()[0].AllParameters(), func(f *apienc.ParameterEncoding) *TypeField { + return &TypeField{ + Name: f.SrcName, + WireName: f.WireName, + Location: f.Location, + Type: f.Type.String(), + Doc: strings.TrimSpace(f.Doc), + } + }), + }) + } + } + if nr, ok := deref(r.Response).(schema.NamedType); ok { + e.ResponseType = nr.String() + // If the response type is in the overlays, we should parse it and + // add it to the endpoint associated with the overlay + ov, ok := overlays.get(nr.DeclInfo.File.FSPath) + if r.ResponseEncoding() != nil && ok { + e = ov.endpoint + e.Types = append(e.Types, &Type{ + Name: nr.String(), + Doc: strings.TrimSpace(nr.DeclInfo.Doc), + Fields: fns.Map(r.ResponseEncoding().AllParameters(), func(f *apienc.ParameterEncoding) *TypeField { + return &TypeField{ + Name: f.SrcName, + WireName: f.WireName, + Location: f.Location, + Type: f.Type.String(), + Doc: strings.TrimSpace(f.Doc), + } + }), + }) + } + } + } + } + // Parse types which are in the overlays but not used in request/response + for _, file := range pkg.Files { + ast.Inspect(file.AST(), func(node ast.Node) bool { + switch node := node.(type) { + case *ast.GenDecl: + // We're only interested in type declarations + if node.Tok != token.TYPE { + return true + } + for _, spec := range node.Specs { + d := spec.(*ast.TypeSpec) + // If the type is not defined in our overlays, skip it. 
+ olay, ok := overlays.get(file.FSPath) + if !ok { + continue + } + + // If it's not a struct type, skip it. + s, ok := schemaParser.ParseType(file, d.Type).(schema.StructType) + if !ok { + continue + } + + e := olay.endpoint + // If the type has already been parsed, skip it. + if slices.ContainsFunc(e.Types, func(t *Type) bool { return t.Name == d.Name.Name }) { + continue + } + + // Otherwise we should add it + e.Types = append(e.Types, &Type{ + Name: d.Name.Name, + Doc: docText(node.Doc), + Fields: fns.MapAndFilter(s.Fields, parseTypeField), + }) + } + } + return true + }) + } + } + return &SyncResult{ + Services: services, + }, nil +} + +// parseTypeField is a helper function to parse a schema field into a TypeField. +func parseTypeField(f schema.StructField) (*TypeField, bool) { + name, ok := f.Name.Get() + if !ok { + return nil, false + } + // Fields which are parsed by this functions are not a request or response type, + // so we can assume the wire name is the json tag name. + wireName := name + if tag, err := f.Tag.Get("json"); err == nil { + wireName = tag.Name + } + return &TypeField{ + Name: name, + Type: f.Type.String(), + Doc: f.Doc, + WireName: wireName, + }, true +} + +// helper function to extract the text from a comment node or "" if nil +func docText(c *ast.CommentGroup) string { + if c == nil { + return "" + } + return strings.TrimSpace(c.Text()) +} diff --git a/cli/daemon/dash/ai/sql.go b/cli/daemon/dash/ai/sql.go new file mode 100644 index 0000000000..48ac26485d --- /dev/null +++ b/cli/daemon/dash/ai/sql.go @@ -0,0 +1,28 @@ +package ai + +import ( + "os" + "os/exec" + "path/filepath" + + "github.com/golang/protobuf/proto" + + "encr.dev/cli/daemon/apps" + "encr.dev/proto/encore/daemon" +) + +// ParseSQLSchema uses SQLC to parse the migration files for an encore database and returns +// the parsed catalog +func ParseSQLSchema(app *apps.Instance, schema string) (*daemon.SQLCPlugin_Catalog, error) { + schemaPath := filepath.Join(app.Root(), 
schema) + cmd := exec.Command(os.Args[0], "generate-sql-schema", "--proto", schemaPath) + output, err := cmd.Output() + if err != nil { + return nil, err + } + var req daemon.SQLCPlugin_GenerateRequest + if err := proto.Unmarshal(output, &req); err != nil { + return nil, err + } + return req.Catalog, nil +} diff --git a/cli/daemon/dash/ai/types.go b/cli/daemon/dash/ai/types.go new file mode 100644 index 0000000000..8b0ea15568 --- /dev/null +++ b/cli/daemon/dash/ai/types.go @@ -0,0 +1,248 @@ +package ai + +import ( + "encr.dev/v2/parser/apis/api/apienc" +) + +type VisibilityType string + +const ( + VisibilityTypePublic VisibilityType = "public" + VisibilityTypePrivate VisibilityType = "private" + VisibilityTypeAuth VisibilityType = "auth" +) + +type SegmentType string + +const ( + SegmentTypeLiteral SegmentType = "literal" + SegmentTypeParam SegmentType = "param" + SegmentTypeWildcard SegmentType = "wildcard" + SegmentTypeFallback SegmentType = "fallback" +) + +type SegmentValueType string + +const SegmentValueTypeString SegmentValueType = "string" + +type PathSegments []PathSegment + +type PathSegment struct { + Type SegmentType `json:"type,omitempty"` + Value *string `json:"value,omitempty"` + ValueType *SegmentValueType `json:"valueType,omitempty"` + Doc string `graphql:"-" json:"doc,omitempty"` +} + +func (p PathSegment) DocItem() (string, string) { + return *p.Value, p.Doc +} + +type Endpoint struct { + ID string `json:"id,omitempty"` + Name string `json:"name"` + Doc string `json:"doc"` + Method string `json:"method"` + Visibility VisibilityType `json:"visibility"` + Path PathSegments `json:"path"` + RequestType string `json:"requestType,omitempty"` + ResponseType string `json:"responseType,omitempty"` + Errors []*Error `json:"errors,omitempty"` + Types []*Type `json:"types,omitempty"` + Language string `json:"language,omitempty"` + TypeSource string `json:"typeSource,omitempty"` + EndpointSource string `json:"endpointSource,omitempty"` +} + +func (s 
*Endpoint) Auth() bool {
+	return s.Visibility == VisibilityTypeAuth
+}
+
+// GraphQL scrubs data that is not needed for the graphql client.
+// It mutates the receiver in place and returns it for chaining.
+func (s *Endpoint) GraphQL() *Endpoint {
+	s.ID = ""
+	s.EndpointSource = ""
+	s.TypeSource = ""
+	s.Types = nil
+	s.Language = ""
+	// IDIOM: `for i, _ := range` is redundant; gofmt -s simplifies it to `for i := range`.
+	for i := range s.Path {
+		s.Path[i].Doc = ""
+	}
+	return s
+}
+
+// Type is a named type referenced by an endpoint's request/response payloads.
+type Type struct {
+	Name   string       `json:"name,omitempty"`
+	Doc    string       `json:"doc,omitempty"`
+	Fields []*TypeField `json:"fields,omitempty"`
+}
+
+// Service is a proposed or existing service with its endpoints.
+type Service struct {
+	ID        string      `json:"id,omitempty"`
+	Name      string      `json:"name,omitempty"`
+	Doc       string      `json:"doc,omitempty"`
+	Endpoints []*Endpoint `json:"endpoints,omitempty"`
+}
+
+func (s Service) GetName() string {
+	return s.Name
+}
+
+func (s Service) GetEndpoints() []*Endpoint {
+	return s.Endpoints
+}
+
+// ServiceInput is the graphql input type for our queries.
+// The graphQL client we use requires the type name to match the
+// graphql type.
+type ServiceInput Service
+
+// GraphQL scrubs data that is not needed for the graphql client.
+// Note the value receiver: the top-level Service copy is scrubbed, but the
+// Endpoints slice is shared, so endpoint scrubbing mutates the originals.
+func (s Service) GraphQL() ServiceInput {
+	s.ID = ""
+	for _, e := range s.Endpoints {
+		e.GraphQL()
+	}
+	return ServiceInput(s)
+}
+
+// BaseAIUpdateType is embedded by all streamed AI update messages; Type carries
+// the graphql __typename used to discriminate the concrete update type.
+type BaseAIUpdateType struct {
+	Type string `graphql:"__typename" json:"type"`
+}
+
+func (b BaseAIUpdateType) IsAIUpdateType() {}
+
+// AIUpdateType is the marker interface implemented by all AI stream updates.
+type AIUpdateType interface {
+	IsAIUpdateType()
+}
+
+type AIStreamUpdate = Result[AIUpdateType]
+
+// ptr returns a pointer to val.
+func ptr[T any](val T) *T {
+	return &val
+}
+
+// Result wraps a streamed value with optional completion/error markers.
+type Result[T any] struct {
+	Value    T
+	Finished *bool
+	Error    *string
+}
+
+// EndpointUpdate is a streamed update describing a proposed endpoint.
+type EndpointUpdate struct {
+	BaseAIUpdateType
+	Service      string         `json:"service,omitempty"`
+	Name         string         `json:"name,omitempty"`
+	Doc          string         `json:"doc,omitempty"`
+	Method       string         `json:"method,omitempty"`
+	Visibility   VisibilityType `json:"visibility,omitempty"`
+	Path         []PathSegment  `json:"path,omitempty"`
+	RequestType  string         `json:"requestType,omitempty"`
+	ResponseType string         `json:"responseType,omitempty"`
+	Errors       []string 
`json:"errors,omitempty"`
+}
+
+// ServiceUpdate is a streamed update describing a proposed service.
+type ServiceUpdate struct {
+	BaseAIUpdateType
+	Name string `json:"name,omitempty"`
+	Doc  string `json:"doc,omitempty"`
+}
+
+// TypeUpdate is a streamed update describing a type used by an endpoint.
+type TypeUpdate struct {
+	BaseAIUpdateType
+	Service  string `json:"service,omitempty"`
+	Endpoint string `json:"endpoint,omitempty"`
+	Name     string `json:"name,omitempty"`
+	// The graphql alias ("mdoc") differs from the json name — presumably to
+	// avoid a field clash in the graphql schema; TODO(review) confirm.
+	Doc string `graphql:"mdoc: doc" json:"doc,omitempty"`
+}
+
+// AISessionID identifies an AI design session across requests.
+type AISessionID string
+
+// SessionUpdate announces the session ID for the current stream.
+type SessionUpdate struct {
+	BaseAIUpdateType
+	Id AISessionID
+}
+
+// TitleUpdate carries a generated title for the session.
+type TitleUpdate struct {
+	BaseAIUpdateType
+	Title string
+}
+
+// LocalEndpointUpdate is the locally-assembled form of an endpoint update
+// sent on to notifiers (not a graphql type).
+type LocalEndpointUpdate struct {
+	Type     string    `json:"type,omitempty"`
+	Service  string    `json:"service,omitempty"`
+	Endpoint *Endpoint `json:"endpoint,omitempty"`
+}
+
+// TypeField describes a single field of a payload type, including its
+// wire-level name and location (header/query/body).
+type TypeField struct {
+	Name     string         `json:"name,omitempty"`
+	WireName string         `json:"wireName,omitempty"`
+	Type     string         `json:"type,omitempty"`
+	Location apienc.WireLoc `json:"location,omitempty"`
+	Doc      string         `json:"doc,omitempty"`
+}
+
+// TypeFieldUpdate is a streamed update describing a field of a type.
+type TypeFieldUpdate struct {
+	BaseAIUpdateType
+	Service  string `json:"service,omitempty"`
+	Endpoint string `json:"endpoint,omitempty"`
+	Struct   string `json:"struct,omitempty"`
+	Name     string `json:"name,omitempty"`
+	Type     string `json:"type,omitempty"`
+	Doc      string `graphql:"mdoc: doc" json:"doc,omitempty"`
+}
+
+// Error is a documented error code an endpoint may return.
+type Error struct {
+	Code string `json:"code,omitempty"`
+	Doc  string `json:"doc,omitempty"`
+}
+
+// DocItem returns the (key, doc) pair used when rendering doc lists.
+func (e Error) DocItem() (string, string) {
+	return e.Code, e.Doc
+}
+
+func (e Error) String() string {
+	return e.Code
+}
+
+// ErrorUpdate is a streamed update describing an error returned by an endpoint.
+type ErrorUpdate struct {
+	BaseAIUpdateType
+	Code     string `json:"code,omitempty"`
+	Doc      string `json:"doc,omitempty"`
+	Service  string `json:"service,omitempty"`
+	Endpoint string `json:"endpoint,omitempty"`
+}
+
+// PathParamUpdate is a streamed update describing documentation for a path parameter.
+type PathParamUpdate struct {
+	BaseAIUpdateType
+	Service  string `json:"service,omitempty"`
+	Endpoint string `json:"endpoint,omitempty"`
+	Param    string `json:"param,omitempty"`
+	Doc      string `json:"doc,omitempty"`
+}
+
+// SyncResult is the outcome of parsing/updating generated code: the (possibly
+// updated) services plus any validation errors found.
+type SyncResult struct {
+	Services []Service 
`json:"services"` + Errors []ValidationError `json:"errors"` +} + +// ValidationError is a simplified ErrInSrc to return to the dashboard +type ValidationError struct { + Service string `json:"service"` + Endpoint string `json:"endpoint"` + CodeType CodeType `json:"codeType"` + Message string `json:"message"` + Start *Pos `json:"start,omitempty"` + End *Pos `json:"end,omitempty"` +} + +type CodeType string + +const ( + CodeTypeEndpoint CodeType = "endpoint" + CodeTypeTypes CodeType = "types" +) + +type Pos struct { + Line int `json:"line"` + Column int `json:"column"` +} diff --git a/cli/daemon/dash/ai/types_test.go b/cli/daemon/dash/ai/types_test.go new file mode 100644 index 0000000000..cb513853db --- /dev/null +++ b/cli/daemon/dash/ai/types_test.go @@ -0,0 +1,39 @@ +package ai + +import ( + "fmt" + "strings" + "testing" +) + +func TestWrapDoc(t *testing.T) { + var wrapTests = []struct { + width int + string string + }{ + {1, "Lorem ipsum dolor sit amet"}, + {80, "Lorem ipsum dolor sit amet"}, + {80, "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."}, + {80, "Lorem Loremipsumdolorsitamet,consecteturadipiscingelit,seddoeiusmodtemporincididuntutlaboreetdoloremagna"}, + {30, "Loremipsumdolorsitamet,consecteturadipiscingelit,seddoeiusmodtemporincididuntutlaboreetdoloremagna"}, + {80, ""}, + {80, "a\nb\nc\nd"}, + } + for _, test := range wrapTests { + t.Run(fmt.Sprintf("WrapDoc(%d, %s)", test.width, test.string), func(t *testing.T) { + result := wrapDoc(test.string, test.width) + lines := strings.Split(result, "\n") + for i, line := range lines { + if len(line) > test.width && strings.Contains(line, " ") { + t.Errorf("Line too long: %s", line) + } + if i+1 < len(lines) { + nextWord, _, _ := strings.Cut(lines[i+1], " ") + if len(line)+len(nextWord) < test.width { + t.Errorf("Line too short: %s", line) + } + } + } + }) + } +} diff --git a/cli/daemon/dash/apiproxy/apiproxy.go b/cli/daemon/dash/apiproxy/apiproxy.go new file mode 100644 index 0000000000..f17232bff7 --- /dev/null +++ b/cli/daemon/dash/apiproxy/apiproxy.go @@ -0,0 +1,45 @@ +package apiproxy + +import ( + "net/http" + "net/http/httputil" + "net/url" + "runtime" + + "github.com/cockroachdb/errors" + "golang.org/x/oauth2" + + "encr.dev/internal/conf" + "encr.dev/internal/version" +) + +func New(targetURL string) (*httputil.ReverseProxy, error) { + target, err := url.Parse(targetURL) + if err != nil { + return nil, errors.Wrap(err, "parse target url") + } + + proxy := &httputil.ReverseProxy{ + Transport: &oauth2.Transport{ + Base: http.DefaultTransport, + Source: oauth2.ReuseTokenSource(nil, conf.DefaultTokenSource), + }, + ErrorHandler: func(writer http.ResponseWriter, request *http.Request, err error) { + if errors.Is(err, conf.ErrNotLoggedIn) { + writer.WriteHeader(http.StatusUnauthorized) + return + } + writer.WriteHeader(http.StatusBadGateway) + + }, + Rewrite: func(r *httputil.ProxyRequest) { + r.Out.URL = target + 
r.Out.Header.Set("User-Agent", "EncoreCLI/"+version.Version) + r.Out.Header.Set("X-Encore-Dev-Dash", "true") + r.Out.Header.Set("X-Encore-Version", version.Version) + r.Out.Header.Set("X-Encore-GOOS", runtime.GOOS) + r.Out.Header.Set("X-Encore-GOARCH", runtime.GOARCH) + }, + } + return proxy, nil +} diff --git a/cli/daemon/dash/dash.go b/cli/daemon/dash/dash.go index d8ae14dae6..bff59e1c4d 100644 --- a/cli/daemon/dash/dash.go +++ b/cli/daemon/dash/dash.go @@ -2,28 +2,87 @@ package dash import ( - "bytes" "context" "encoding/json" + "errors" "fmt" - "io/ioutil" - "net/http" "path/filepath" + "slices" + "strings" + "sync" + "time" - "encr.dev/cli/daemon/run" - "encr.dev/cli/daemon/runtime/trace" - "encr.dev/cli/internal/jsonrpc2" "github.com/golang/protobuf/jsonpb" "github.com/rs/zerolog/log" + + "encr.dev/cli/daemon/apps" + "encr.dev/cli/daemon/dash/ai" + "encr.dev/cli/daemon/engine/trace2" + "encr.dev/cli/daemon/namespace" + "encr.dev/cli/daemon/run" + "encr.dev/cli/daemon/sqldb" + "encr.dev/cli/internal/browser" + "encr.dev/cli/internal/jsonrpc2" + "encr.dev/cli/internal/onboarding" + "encr.dev/cli/internal/telemetry" + "encr.dev/internal/version" + "encr.dev/parser/encoding" + "encr.dev/pkg/editors" + "encr.dev/pkg/errlist" + "encr.dev/pkg/jsonext" + tracepb2 "encr.dev/proto/encore/engine/trace2" + meta "encr.dev/proto/encore/parser/meta/v1" ) type handler struct { - rpc jsonrpc2.Conn - run *run.Manager - tr *trace.Store + rpc jsonrpc2.Conn + apps *apps.Manager + run *run.Manager + ns *namespace.Manager + ai *ai.Manager + tr trace2.Store +} + +func (h *handler) GetMeta(appID string) (*meta.Data, error) { + runInstance := h.run.FindRunByAppID(appID) + var md *meta.Data + if runInstance != nil && runInstance.ProcGroup() != nil { + md = runInstance.ProcGroup().Meta + } else { + app, err := h.apps.FindLatestByPlatformOrLocalID(appID) + if err != nil { + return nil, err + } + md, err = app.CachedMetadata() + if err != nil { + return nil, err + } else if md == nil { + 
return nil, err + } + } + return md, nil +} + +func (h *handler) GetNamespace(ctx context.Context, appID string) (*namespace.Namespace, error) { + runInstance := h.run.FindRunByAppID(appID) + if runInstance != nil && runInstance.ProcGroup() != nil { + return runInstance.NS, nil + } else { + app, err := h.apps.FindLatestByPlatformOrLocalID(appID) + if err != nil { + return nil, err + } + ns, err := h.ns.GetActive(ctx, app) + if err != nil { + return nil, err + } + return ns, nil + } } func (h *handler) Handle(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2.Request) error { + reply = makeProtoReplier(reply) + unmarshal := func(dst interface{}) error { if r.Params() == nil { return fmt.Errorf("missing params") @@ -32,46 +91,210 @@ func (h *handler) Handle(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2 } switch r.Method() { + case "db/query": + var p QueryRequest + if err := unmarshal(&p); err != nil { + return reply(ctx, nil, err) + } + res, err := h.Query(ctx, p) + return reply(ctx, res, err) + case "db/transaction": + var p TransactionRequest + if err := unmarshal(&p); err != nil { + return reply(ctx, nil, err) + } + res, err := h.Transaction(ctx, p) + return reply(ctx, res, err) + case "onboarding/get": + state, err := onboarding.Load() + if err != nil { + return reply(ctx, nil, err) + } + resp := map[string]time.Time{} + for key, val := range state.EventMap { + if val.IsSet() { + resp[key] = val.UTC() + } + } + return reply(ctx, resp, nil) + case "onboarding/set": + type params struct { + Properties []string `json:"properties"` + } + var p params + if err := unmarshal(&p); err != nil { + return reply(ctx, nil, err) + } + state, err := onboarding.Load() + if err != nil { + return reply(ctx, nil, err) + } + for _, prop := range p.Properties { + state.Property(prop).Set() + } + err = state.Write() + if err != nil { + return reply(ctx, nil, err) + } + return reply(ctx, nil, nil) + case "telemetry": + type params struct { + Event string `json:"event"` 
+ Properties map[string]interface{} `json:"properties"` + Once bool `json:"once,omitempty"` + } + var p params + if err := unmarshal(&p); err != nil { + return reply(ctx, nil, err) + } + if p.Once { + telemetry.SendOnce(p.Event, p.Properties) + } else { + telemetry.Send(p.Event, p.Properties) + } + return reply(ctx, "ok", nil) + case "version": + type versionResp struct { + Version string `json:"version"` + Channel string `json:"channel"` + } + + rtn := versionResp{ + Version: version.Version, + Channel: string(version.Channel), + } + + return reply(ctx, rtn, nil) + case "list-apps": type app struct { - ID string `json:"id"` - Name string `json:"name"` + ID string `json:"id"` + Name string `json:"name"` + AppRoot string `json:"app_root"` + Offline bool `json:"offline,omitempty"` } - runs := h.run.ListRuns() + apps := []app{} // prevent marshalling as null - seen := make(map[string]bool) - for _, r := range runs { - id := r.AppID - name := r.AppSlug - if name == "" { - name = filepath.Base(r.Root) + + // Load all the apps we know about + allApp, err := h.apps.List() + if err != nil { + return reply(ctx, nil, err) + } + for _, instance := range allApp { + data := app{ + ID: instance.PlatformOrLocalID(), + Name: instance.Name(), + AppRoot: instance.Root(), + Offline: true, } - if !seen[id] { - seen[id] = true - apps = append(apps, app{ID: id, Name: name}) + + if run := h.run.FindRunByAppID(instance.PlatformOrLocalID()); run != nil { + data.Offline = false } + + apps = append(apps, data) } + + // Sort the apps by offline status, then by name + slices.SortStableFunc(apps, func(a, b app) int { + if a.Offline == b.Offline { + return strings.Compare(a.Name, b.Name) + } + if a.Offline { + return 1 + } + return -1 + }) + return reply(ctx, apps, nil) + case "traces/clear": + telemetry.Send("traces.clear") + var params struct { + AppID string `json:"app_id"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + err := h.tr.Clear(ctx, params.AppID) + 
return reply(ctx, "ok", err) + case "traces/list": + telemetry.Send("traces.list") + var params struct { + AppID string `json:"app_id"` + MessageID string `json:"message_id"` + TestTraces *bool `json:"test_traces,omitempty"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + + query := &trace2.Query{ + AppID: params.AppID, + TestFilter: params.TestTraces, + MessageID: params.MessageID, + Limit: 100, + } + var list []*tracepb2.SpanSummary + iter := func(s *tracepb2.SpanSummary) bool { + list = append(list, s) + return true + } + err := h.tr.List(ctx, query, iter) + if err != nil { + log.Error().Err(err).Msg("dash: could not list traces") + } + return reply(ctx, list, err) + + case "traces/get": + telemetry.Send("traces.get") + var params struct { + AppID string `json:"app_id"` + TraceID string `json:"trace_id"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } - case "list-traces": + var events []*tracepb2.TraceEvent + iter := func(ev *tracepb2.TraceEvent) bool { + events = append(events, ev) + return true + } + err := h.tr.Get(ctx, params.AppID, params.TraceID, iter) + if err != nil { + log.Error().Err(err).Msg("dash: could not list trace events") + } + return reply(ctx, events, err) + + case "status": var params struct { AppID string } if err := unmarshal(¶ms); err != nil { return reply(ctx, nil, err) } - traces := h.tr.List(params.AppID) - tr := make([]*Trace, len(traces)) - for i, t := range traces { - tt, err := TransformTrace(t) - if err != nil { + + // Find the latest app by platform ID or local ID. 
+ app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + if errors.Is(err, apps.ErrNotFound) { + return reply(ctx, map[string]interface{}{"running": false}, nil) + } else { return reply(ctx, nil, err) } - tr[i] = tt } - return reply(ctx, tr, nil) - case "status": + // Now find the running instance(s) + runInstance := h.run.FindRunByAppID(params.AppID) + status, err := buildAppStatus(app, runInstance) + if err != nil { + log.Error().Err(err).Msg("dash: could not build app status") + return reply(ctx, nil, err) + } + + return reply(ctx, status, nil) + case "db-migration-status": var params struct { AppID string } @@ -79,82 +302,290 @@ func (h *handler) Handle(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2 return reply(ctx, nil, err) } - run := h.run.FindRunByAppID(params.AppID) - if run == nil { - return reply(ctx, map[string]interface{}{"running": false}, nil) - } - proc := run.Proc() - if proc == nil { - return reply(ctx, map[string]interface{}{"running": false}, nil) + // Find the latest app by platform ID or local ID. 
+ app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) } - m := &jsonpb.Marshaler{OrigName: true, EmitDefaults: true} - for _, svc := range proc.Meta.Svcs { - for _, rpc := range svc.Rpcs { - log.Info().Str("rpc", svc.Name+"."+rpc.Name).Msg("rpc") - } + appMeta, err := h.GetMeta(params.AppID) + if err != nil { + return reply(ctx, nil, err) } - str, err := m.MarshalToString(proc.Meta) + namespace, err := h.GetNamespace(ctx, params.AppID) if err != nil { - log.Error().Err(err).Msg("dash: could not marshal app metadata") return reply(ctx, nil, err) } - return reply(ctx, map[string]interface{}{ - "running": true, - "appID": run.AppID, - "pid": run.ID, - "meta": json.RawMessage(str), - "port": run.Port, - }, nil) + clusterType := sqldb.Run + cluster, ok := h.run.ClusterMgr.Get(sqldb.GetClusterID(app, clusterType, namespace)) + if !ok { + return reply(ctx, nil, fmt.Errorf("failed to get database cluster of type %s", clusterType)) + } + status := buildDbMigrationStatus(ctx, appMeta, cluster) + + return reply(ctx, status, nil) case "api-call": - var params struct { - AppID string - Endpoint string - Payload []byte - AuthToken string + telemetry.Send("api.call") + var params run.ApiCallParams + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + res, err := run.CallAPI(ctx, h.run.FindRunByAppID(params.AppID), ¶ms) + return reply(ctx, res, err) + + case "editors/list": + var resp struct { + Editors []string `json:"editors"` } + found, err := editors.Resolve(ctx) + if err != nil { + log.Err(err).Msg("dash: could not list editors") + return reply(ctx, nil, err) + } + + for _, e := range found { + resp.Editors = append(resp.Editors, string(e.Editor)) + } + return reply(ctx, resp, nil) + case "ai/propose-system-design": + telemetry.Send("ai.propose") + log.Debug().Msg("dash: propose-system-design") + var params struct { + AppID string `json:"app_id"` + Prompt string `json:"prompt"` + } if err := 
unmarshal(¶ms); err != nil { return reply(ctx, nil, err) } - run := h.run.FindRunByAppID(params.AppID) - if run == nil { - log.Error().Str("appID", params.AppID).Msg("dash: cannot make api call: app not running") - return reply(ctx, nil, fmt.Errorf("app not running")) + md, err := h.GetMeta(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + sessionCh := make(chan *ai.AINotification) + defer close(sessionCh) + idResp := sync.Once{} + task, err := h.ai.ProposeSystemDesign(ctx, params.AppID, params.Prompt, md, func(ctx context.Context, msg *ai.AINotification) error { + if _, ok := msg.Value.(ai.SessionUpdate); ok || msg.Error != nil { + idResp.Do(func() { + sessionCh <- msg + }) + if ok { + return nil + } + } + return h.rpc.Notify(ctx, r.Method()+"/stream", msg) + }) + if err != nil { + return reply(ctx, nil, err) } - url := fmt.Sprintf("http://localhost:%d/%s", run.Port, params.Endpoint) - log := log.With().Str("appID", params.AppID).Str("endpoint", params.Endpoint).Logger() + select { + case msg := <-sessionCh: + su, ok := msg.Value.(ai.SessionUpdate) + if !ok || msg.Error != nil { + if msg.Error != nil { + err = jsonrpc2.NewError(ai.ErrorCodeMap[msg.Error.Code], msg.Error.Message) + } else { + err = jsonrpc2.NewError(1, "missing session_id") + } + return reply(ctx, nil, err) + } + return reply(ctx, map[string]string{ + "session_id": string(su.Id), + "subscription_id": task.SubscriptionID, + }, nil) + case <-ctx.Done(): + return reply(ctx, nil, ctx.Err()) + case <-time.NewTimer(10 * time.Second).C: + _ = task.Stop() + return reply(ctx, nil, errors.New("timed out waiting for response")) + } - req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(params.Payload)) + case "ai/modify-system-design": + telemetry.Send("ai.modify") + log.Debug().Msg("dash: modify-system-design") + var params struct { + AppID string `json:"app_id"` + SessionID ai.AISessionID `json:"session_id"` + OriginalPrompt string `json:"original_prompt"` + Prompt 
string `json:"prompt"` + Proposed []ai.Service `json:"proposed"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + md, err := h.GetMeta(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + task, err := h.ai.ModifySystemDesign(ctx, params.AppID, params.SessionID, params.OriginalPrompt, params.Proposed, params.Prompt, md, func(ctx context.Context, msg *ai.AINotification) error { + return h.rpc.Notify(ctx, r.Method()+"/stream", msg) + }) + return reply(ctx, task.SubscriptionID, err) + case "ai/define-endpoints": + telemetry.Send("ai.details") + log.Debug().Msg("dash: define-endpoints") + log.Debug().Msg("dash: define-endpoints") + var params struct { + AppID string `json:"app_id"` + SessionID ai.AISessionID `json:"session_id"` + Prompt string `json:"prompt"` + Proposed []ai.Service `json:"proposed"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + md, err := h.GetMeta(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + task, err := h.ai.DefineEndpoints(ctx, params.AppID, params.SessionID, params.Prompt, md, params.Proposed, func(ctx context.Context, msg *ai.AINotification) error { + return h.rpc.Notify(ctx, r.Method()+"/stream", msg) + }) + return reply(ctx, task.SubscriptionID, err) + case "ai/parse-code": + log.Debug().Msg("dash: parse-code") + var params struct { + AppID string `json:"app_id"` + Services []ai.Service `json:"services"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + results, err := h.ai.ParseCode(ctx, params.Services, app) + return reply(ctx, results, err) + case "ai/update-code": + log.Debug().Msg("dash: update-code") + var params struct { + AppID string `json:"app_id"` + Services []ai.Service `json:"services"` + Overwrite bool `json:"overwrite"` // Ovwerwrite any existing endpoint code + } + if err := 
unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + results, err := h.ai.UpdateCode(ctx, params.Services, app, params.Overwrite) + return reply(ctx, results, err) + case "ai/preview-files": + telemetry.Send("ai.preview") + log.Debug().Msg("dash: preview-files") + var params struct { + AppID string `json:"app_id"` + Services []ai.Service `json:"services"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + result, err := h.ai.PreviewFiles(ctx, params.Services, app) + return reply(ctx, result, err) + case "ai/write-files": + telemetry.Send("ai.write") + log.Debug().Msg("dash: write-files") + var params struct { + AppID string `json:"app_id"` + Services []ai.Service `json:"services"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + result, err := h.ai.WriteFiles(ctx, params.Services, app) + return reply(ctx, result, err) + case "ai/parse-sql-schema": + var params struct { + AppID string `json:"app_id"` + } + if err := unmarshal(¶ms); err != nil { + return reply(ctx, nil, err) + } + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) + if err != nil { + return reply(ctx, nil, err) + } + md, err := h.GetMeta(params.AppID) if err != nil { - log.Err(err).Msg("dash: api call failed") return reply(ctx, nil, err) } - if tok := params.AuthToken; tok != "" { - req.Header.Set("Authorization", "Bearer "+tok) + for _, db := range md.SqlDatabases { + _, err := ai.ParseSQLSchema(app, *db.MigrationRelPath) + if err != nil { + return reply(ctx, nil, err) + } + } + return reply(ctx, true, err) + case "editors/open": + telemetry.Send("editors.open") + var 
params struct { + AppID string `json:"app_id"` + Editor editors.EditorName `json:"editor"` + File string `json:"file"` + StartLine int `json:"start_line,omitempty"` + StartCol int `json:"start_col,omitempty"` + EndLine int `json:"end_line,omitempty"` + EndCol int `json:"end_col,omitempty"` + } + if err := unmarshal(¶ms); err != nil { + log.Warn().Err(err).Msg("dash: could not parse open command") + return reply(ctx, nil, err) + } + + editor, err := editors.Find(ctx, params.Editor) + if err != nil { + log.Err(err).Str("editor", string(params.Editor)).Msg("dash: could not find editor") + return reply(ctx, nil, err) } - resp, err := http.DefaultClient.Do(req) + + app, err := h.apps.FindLatestByPlatformOrLocalID(params.AppID) if err != nil { - log.Error().Err(err).Msg("dash: api call failed") + if errors.Is(err, apps.ErrNotFound) { + return reply(ctx, nil, fmt.Errorf("app not found, try running encore run")) + } + log.Err(err).Str("app_id", params.AppID).Msg("dash: could not find app") return reply(ctx, nil, err) } - body, _ := ioutil.ReadAll(resp.Body) - resp.Body.Close() - log.Info().Int("status", resp.StatusCode).Msg("dash: api call completed") - return reply(ctx, map[string]interface{}{ - "status": resp.Status, - "status_code": resp.StatusCode, - "body": body, - }, nil) + + if !filepath.IsLocal(params.File) { + log.Warn().Str("file", params.File).Msg("dash: file was not local to the repo") + return reply(ctx, nil, errors.New("file path must be local")) + } + params.File = filepath.Join(app.Root(), params.File) + + if err := editors.LaunchExternalEditor(params.File, params.StartLine, params.StartCol, editor); err != nil { + log.Err(err).Str("editor", string(params.Editor)).Msg("dash: could not open file") + return reply(ctx, nil, err) + } + + type openResp struct{} + return reply(ctx, openResp{}, nil) } return jsonrpc2.MethodNotFound(ctx, reply, r) } +type sourceContextResponse struct { + Lines []string `json:"lines"` + Start int `json:"start"` +} + func (h 
*handler) listenNotify(ctx context.Context, ch <-chan *notification) { for { select { @@ -169,9 +600,8 @@ func (h *handler) listenNotify(ctx context.Context, ch <-chan *notification) { } func (s *Server) listenTraces() { - for tt := range s.traceCh { - // Transforming a trace is fairly expensive, so only do it - // if somebody is listening. + for sp := range s.traceCh { + // Only marshal the trace if someone's listening. s.mu.Lock() hasClients := len(s.clients) > 0 s.mu.Unlock() @@ -179,14 +609,19 @@ func (s *Server) listenTraces() { continue } - tr, err := TransformTrace(tt) + data, err := jsonext.ProtoEncoder.Marshal(sp.Span) if err != nil { - log.Error().Err(err).Msg("dash: could not process trace") + log.Error().Err(err).Msg("dash: could not marshal trace") continue } + s.notify(¬ification{ Method: "trace/new", - Params: tr, + Params: map[string]any{ + "app_id": sp.AppID, + "test_trace": sp.TestTrace, + "span": json.RawMessage(data), + }, }) } } @@ -195,52 +630,65 @@ var _ run.EventListener = (*Server)(nil) // OnStart notifies active websocket clients about the started run. func (s *Server) OnStart(r *run.Run) { - m := &jsonpb.Marshaler{OrigName: true, EmitDefaults: true} - proc := r.Proc() - str, err := m.MarshalToString(proc.Meta) + status, err := buildAppStatus(r.App, r) if err != nil { - log.Error().Err(err).Msg("dash: could not marshal app meta") + log.Error().Err(err).Msg("dash: could not build app status") return } + // Open the browser if needed. 
+ browserMode := r.Params.Browser + if browserMode == run.BrowserModeAlways || (browserMode == run.BrowserModeAuto && !s.hasClients()) { + u := fmt.Sprintf("http://localhost:%d/%s", s.dashPort, r.App.PlatformOrLocalID()) + browser.Open(u) + } + s.notify(¬ification{ Method: "process/start", - Params: map[string]interface{}{ - "appID": r.AppID, - "pid": r.ID, - "port": r.Port, - "meta": json.RawMessage(str), - }, + Params: status, + }) +} + +func (s *Server) OnCompileStart(r *run.Run) { + status, err := buildAppStatus(r.App, r) + if err != nil { + log.Error().Err(err).Msg("dash: could not build app status") + return + } + + status.Compiling = true + + s.notify(¬ification{ + Method: "process/compile-start", + Params: status, }) } // OnReload notifies active websocket clients about the reloaded run. func (s *Server) OnReload(r *run.Run) { - m := &jsonpb.Marshaler{OrigName: true, EmitDefaults: true} - proc := r.Proc() - str, err := m.MarshalToString(proc.Meta) + status, err := buildAppStatus(r.App, r) if err != nil { - log.Error().Err(err).Msg("dash: could not marshal app meta") + log.Error().Err(err).Msg("dash: could not build app status") return } + s.notify(¬ification{ Method: "process/reload", - Params: map[string]interface{}{ - "appID": r.AppID, - "pid": r.ID, - "meta": json.RawMessage(str), - }, + Params: status, }) } // OnStop notifies active websocket clients about the stopped run. 
func (s *Server) OnStop(r *run.Run) { + status, err := buildAppStatus(r.App, nil) + if err != nil { + log.Error().Err(err).Msg("dash: could not build app status") + return + } + s.notify(¬ification{ Method: "process/stop", - Params: map[string]interface{}{ - "appID": r.AppID, - "pid": r.ID, - }, + Params: status, }) } @@ -254,6 +702,27 @@ func (s *Server) OnStderr(r *run.Run, out []byte) { s.onOutput(r, out) } +func (s *Server) OnError(r *run.Run, err *errlist.List) { + if err == nil { + return + } + + status, statusErr := buildAppStatus(r.App, nil) + if statusErr != nil { + log.Error().Err(statusErr).Msg("dash: could not build app status") + return + } + + err.MakeRelative(r.App.Root(), "") + + status.CompileError = err.Error() + + s.notify(¬ification{ + Method: "process/compile-error", + Params: status, + }) +} + func (s *Server) onOutput(r *run.Run, out []byte) { // Copy to a new slice since we cannot retain it after the call ends, and notify is async. out2 := make([]byte, len(out)) @@ -261,9 +730,151 @@ func (s *Server) onOutput(r *run.Run, out []byte) { s.notify(¬ification{ Method: "process/output", Params: map[string]interface{}{ - "appID": r.AppID, + "appID": r.App.PlatformOrLocalID(), "pid": r.ID, "output": out2, }, }) } + +// protoReplier is a jsonrpc2.Replier that wraps another replier and serializes +// any protobuf message with protojson. +func makeProtoReplier(rep jsonrpc2.Replier) jsonrpc2.Replier { + return func(ctx context.Context, result any, err error) error { + if err != nil { + return rep(ctx, nil, err) + } + jsonData, err := jsonext.ProtoEncoder.Marshal(result) + return rep(ctx, json.RawMessage(jsonData), err) + } +} + +// appStatus is the the shared data structure to communicate app status to the client. +// +// It is mirrored in the frontend at src/lib/client/dev-dash-client.ts as `AppStatus`. 
+type appStatus struct { + Running bool `json:"running"` + Tutorial string `json:"tutorial,omitempty"` + AppID string `json:"appID"` + PlatformID string `json:"platformID,omitempty"` + AppRoot string `json:"appRoot"` + PID string `json:"pid,omitempty"` + Meta json.RawMessage `json:"meta,omitempty"` + Addr string `json:"addr,omitempty"` + APIEncoding *encoding.APIEncoding `json:"apiEncoding,omitempty"` + Compiling bool `json:"compiling"` + CompileError string `json:"compileError,omitempty"` +} + +type dbMigrationHistory struct { + DatabaseName string `json:"databaseName"` + Migrations []dbMigration `json:"migrations"` +} + +type dbMigration struct { + Filename string `json:"filename"` + Number uint64 `json:"number"` + Description string `json:"description"` + Applied bool `json:"applied"` +} + +func buildAppStatus(app *apps.Instance, runInstance *run.Run) (s appStatus, err error) { + // Now try and grab latest metadata for the app + var md *meta.Data + if runInstance != nil { + proc := runInstance.ProcGroup() + if proc != nil { + md = proc.Meta + } + } + + if md == nil { + md, err = app.CachedMetadata() + if err != nil { + return appStatus{}, err + } + } + + // Convert the metadata into a format we can send to the client + mdStr := "null" + var apiEnc *encoding.APIEncoding + if md != nil { + m := &jsonpb.Marshaler{OrigName: true, EmitDefaults: true} + + mdStr, err = m.MarshalToString(md) + if err != nil { + return appStatus{}, err + } + + apiEnc = encoding.DescribeAPI(md) + } + + // Build the response + resp := appStatus{ + Running: false, + Tutorial: app.Tutorial(), + AppID: app.PlatformOrLocalID(), + PlatformID: app.PlatformID(), + Meta: json.RawMessage(mdStr), + AppRoot: app.Root(), + APIEncoding: apiEnc, + } + if runInstance != nil { + resp.Running = true + resp.PID = runInstance.ID + resp.Addr = runInstance.ListenAddr + } + + return resp, nil +} + +func buildDbMigrationStatus(ctx context.Context, appMeta *meta.Data, cluster *sqldb.Cluster) []dbMigrationHistory 
{ + var statuses []dbMigrationHistory + for _, dbMeta := range appMeta.SqlDatabases { + db, ok := cluster.GetDB(dbMeta.Name) + if !ok { + // Remote database migration status are not supported yet + continue + } + appliedVersions, err := db.ListAppliedMigrations(ctx) + if err != nil { + log.Error().Msgf("failed to list applied migrations for database %s: %v", dbMeta.Name, err) + continue + } + statuses = append(statuses, buildMigrationHistory(dbMeta, appliedVersions)) + } + return statuses +} + +func buildMigrationHistory(dbMeta *meta.SQLDatabase, appliedVersions map[uint64]bool) dbMigrationHistory { + history := dbMigrationHistory{ + DatabaseName: dbMeta.Name, + Migrations: []dbMigration{}, + } + // Go over migrations from latest to earliest + sortedMigrations := make([]*meta.DBMigration, len(dbMeta.Migrations)) + copy(sortedMigrations, dbMeta.Migrations) + slices.SortStableFunc(sortedMigrations, func(a, b *meta.DBMigration) int { + return int(b.Number - a.Number) + }) + implicitlyApplied := false + for _, migration := range sortedMigrations { + dirty, attempted := appliedVersions[migration.Number] + applied := attempted && !dirty + // If the database doesn't allow non-sequential migrations, + // then any migrations before the last applied will also have + // been applied even if we don't see them in the database. 
+ if !dbMeta.AllowNonSequentialMigrations && applied { + implicitlyApplied = true + } + + status := dbMigration{ + Filename: migration.Filename, + Number: migration.Number, + Description: migration.Description, + Applied: applied || implicitlyApplied, + } + history.Migrations = append(history.Migrations, status) + } + return history +} diff --git a/cli/daemon/dash/dash_test.go b/cli/daemon/dash/dash_test.go new file mode 100644 index 0000000000..b5fa38acd9 --- /dev/null +++ b/cli/daemon/dash/dash_test.go @@ -0,0 +1,138 @@ +package dash + +import ( + "reflect" + "testing" + + meta "encr.dev/proto/encore/parser/meta/v1" +) + +func TestBuildMigrationHistory(t *testing.T) { + tests := []struct { + name string + dbMeta *meta.SQLDatabase + appliedVersions map[uint64]bool + want dbMigrationHistory + }{ + { + name: "sequential migrations all applied cleanly", + dbMeta: &meta.SQLDatabase{ + Name: "test-db", + Migrations: []*meta.DBMigration{ + {Number: 1, Filename: "001.sql", Description: "first"}, + {Number: 2, Filename: "002.sql", Description: "second"}, + {Number: 3, Filename: "003.sql", Description: "third"}, + }, + AllowNonSequentialMigrations: false, + }, + appliedVersions: map[uint64]bool{ + 1: false, // clean + 2: false, // clean + 3: false, // clean + }, + want: dbMigrationHistory{ + DatabaseName: "test-db", + Migrations: []dbMigration{ + {Number: 3, Filename: "003.sql", Description: "third", Applied: true}, + {Number: 2, Filename: "002.sql", Description: "second", Applied: true}, + {Number: 1, Filename: "001.sql", Description: "first", Applied: true}, + }, + }, + }, + { + name: "sequential migrations with dirty migration", + dbMeta: &meta.SQLDatabase{ + Name: "test-db", + Migrations: []*meta.DBMigration{ + {Number: 1, Filename: "001.sql", Description: "first"}, + {Number: 2, Filename: "002.sql", Description: "second"}, + {Number: 3, Filename: "003.sql", Description: "third"}, + }, + AllowNonSequentialMigrations: false, + }, + appliedVersions: map[uint64]bool{ + 1: 
false, // clean + 2: true, // dirty + }, + want: dbMigrationHistory{ + DatabaseName: "test-db", + Migrations: []dbMigration{ + {Number: 3, Filename: "003.sql", Description: "third", Applied: false}, + {Number: 2, Filename: "002.sql", Description: "second", Applied: false}, + {Number: 1, Filename: "001.sql", Description: "first", Applied: true}, + }, + }, + }, + { + name: "sequential migrations partially applied", + dbMeta: &meta.SQLDatabase{ + Name: "test-db", + Migrations: []*meta.DBMigration{ + {Number: 1, Filename: "001.sql", Description: "first"}, + {Number: 2, Filename: "002.sql", Description: "second"}, + {Number: 3, Filename: "003.sql", Description: "third"}, + }, + AllowNonSequentialMigrations: false, + }, + appliedVersions: map[uint64]bool{ + 1: false, // clean + 2: false, // clean + }, + want: dbMigrationHistory{ + DatabaseName: "test-db", + Migrations: []dbMigration{ + {Number: 3, Filename: "003.sql", Description: "third", Applied: false}, + {Number: 2, Filename: "002.sql", Description: "second", Applied: true}, + {Number: 1, Filename: "001.sql", Description: "first", Applied: true}, + }, + }, + }, + { + name: "non-sequential migrations with mix of clean and dirty", + dbMeta: &meta.SQLDatabase{ + Name: "test-db", + Migrations: []*meta.DBMigration{ + {Number: 1, Filename: "001.sql", Description: "first"}, + {Number: 2, Filename: "002.sql", Description: "second"}, + {Number: 3, Filename: "003.sql", Description: "third"}, + }, + AllowNonSequentialMigrations: true, + }, + appliedVersions: map[uint64]bool{ + 1: false, // clean + 2: true, // dirty + 3: false, // clean + }, + want: dbMigrationHistory{ + DatabaseName: "test-db", + Migrations: []dbMigration{ + {Number: 3, Filename: "003.sql", Description: "third", Applied: true}, + {Number: 2, Filename: "002.sql", Description: "second", Applied: false}, + {Number: 1, Filename: "001.sql", Description: "first", Applied: true}, + }, + }, + }, + { + name: "empty migrations list", + dbMeta: &meta.SQLDatabase{ + Name: 
"test-db", + Migrations: []*meta.DBMigration{}, + AllowNonSequentialMigrations: false, + }, + appliedVersions: map[uint64]bool{}, + want: dbMigrationHistory{ + DatabaseName: "test-db", + Migrations: []dbMigration{}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := buildMigrationHistory(tt.dbMeta, tt.appliedVersions) + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("buildMigrationHistory() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/cli/daemon/dash/dashapp/.gitignore b/cli/daemon/dash/dashapp/.gitignore deleted file mode 100644 index d451ff16c1..0000000000 --- a/cli/daemon/dash/dashapp/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -node_modules -.DS_Store -dist -dist-ssr -*.local diff --git a/cli/daemon/dash/dashapp/index.html b/cli/daemon/dash/dashapp/index.html deleted file mode 100644 index aa72ddc665..0000000000 --- a/cli/daemon/dash/dashapp/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - Encore Dashboard - - -
- - - - diff --git a/cli/daemon/dash/dashapp/package-lock.json b/cli/daemon/dash/dashapp/package-lock.json deleted file mode 100644 index ef5fc1dd9b..0000000000 --- a/cli/daemon/dash/dashapp/package-lock.json +++ /dev/null @@ -1,2883 +0,0 @@ -{ - "name": "dashapp", - "version": "0.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "version": "0.0.0", - "dependencies": { - "@headlessui/react": "^0.2.0", - "@tailwindcss/forms": "^0.2.1", - "codemirror": "^5.59.2", - "events": "^3.2.0", - "json-rpc-protocol": "^0.13.1", - "luxon": "^1.25.0", - "react": "^17.0.0", - "react-dom": "^17.0.0", - "react-router-dom": "^5.2.0", - "tailwindcss": "^2.0.2" - }, - "devDependencies": { - "@types/codemirror": "^0.0.108", - "@types/events": "^3.0.0", - "@types/luxon": "^1.25.1", - "@types/node": "^14.14.25", - "@types/react": "^17.0.0", - "@types/react-dom": "^17.0.0", - "@types/react-router-dom": "^5.1.7", - "@vitejs/plugin-react-refresh": "^1.1.0", - "autoprefixer": "^10.2.4", - "postcss": "^8.2.5", - "typescript": "^4.1.2", - "vite": "^2.0.0-beta.64" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", - "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", - "dev": true, - "dependencies": { - "@babel/highlight": "^7.12.13" - } - }, - "node_modules/@babel/core": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.13.tgz", - "integrity": "sha512-BQKE9kXkPlXHPeqissfxo0lySWJcYdEP0hdtJOH/iJfDdhOCcgtNCjftCJg3qqauB4h+lz2N6ixM++b9DN1Tcw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-module-transforms": "^7.12.13", - "@babel/helpers": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - 
"convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", - "json5": "^2.1.2", - "lodash": "^4.17.19", - "semver": "^5.4.1", - "source-map": "^0.5.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/generator": { - "version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.15.tgz", - "integrity": "sha512-6F2xHxBiFXWNSGb7vyCUTBF8RCLY66rS0zEPcP8t/nQyXjha5EuK4z7H5o7fWG8B4M7y6mqVWq1J+1PuwRhecQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13", - "jsesc": "^2.5.1", - "source-map": "^0.5.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", - "dev": true, - "dependencies": { - "@babel/helper-get-function-arity": "^7.12.13", - "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-get-function-arity": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz", - "integrity": "sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.13.tgz", - "integrity": "sha512-B+7nN0gIL8FZ8SvMcF+EPyB21KnCcZHQZFczCxbiNGV/O0rsrSBlWGLzmtBJ3GMjSVMIm4lpFhR+VdVBuIsUcQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.12.13", - "resolved": 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz", - "integrity": "sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz", - "integrity": "sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-replace-supers": "^7.12.13", - "@babel/helper-simple-access": "^7.12.13", - "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - "lodash": "^4.17.19" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz", - "integrity": "sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz", - "integrity": "sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==", - "dev": true - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz", - "integrity": "sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==", 
- "dev": true, - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.12.13", - "@babel/helper-optimise-call-expression": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-simple-access": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz", - "integrity": "sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz", - "integrity": "sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", - "dev": true - }, - "node_modules/@babel/helpers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.13.tgz", - "integrity": "sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ==", - "dev": true, - "dependencies": { - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/highlight": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", - "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", - "dev": true, - 
"dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.15.tgz", - "integrity": "sha512-AQBOU2Z9kWwSZMd6lNjCX0GUgFonL1wAM1db8L8PMk9UDaGsRCArBkU4Sc+UCM3AE4hjbXx+h58Lb3QT4oRmrA==", - "dev": true, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-syntax-import-meta": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", - "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - } - }, - "node_modules/@babel/runtime": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.13.tgz", - "integrity": "sha512-8+3UMPBrjFa/6TtKi/7sehPKqfAm4g6K+YQjyyFOLUTxzOngcRZTlAVY8sc2CORJYqdHQY8gRPHmn+qo15rCBw==", - "dependencies": { - "regenerator-runtime": "^0.13.4" - } - }, - "node_modules/@babel/template": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.13.tgz", - "integrity": "sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "node_modules/@babel/traverse": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.13.tgz", - "integrity": "sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-function-name": "^7.12.13", - 
"@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" - } - }, - "node_modules/@babel/types": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.13.tgz", - "integrity": "sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ==", - "dev": true, - "dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", - "lodash": "^4.17.19", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@fullhuman/postcss-purgecss": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@fullhuman/postcss-purgecss/-/postcss-purgecss-3.1.3.tgz", - "integrity": "sha512-kwOXw8fZ0Lt1QmeOOrd+o4Ibvp4UTEBFQbzvWldjlKv5n+G9sXfIPn1hh63IQIL8K8vbvv1oYMJiIUbuy9bGaA==", - "dependencies": { - "purgecss": "^3.1.3" - } - }, - "node_modules/@headlessui/react": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-0.2.0.tgz", - "integrity": "sha512-YV+vF+QhTRcspydPdHF3ZXe+FkOiJpRdqMjjFIIX9bSdT2O2T7GurgKQdGgamNUM+B99MZBOTRqxS8Dlh485eg==", - "engines": { - "node": ">=10" - } - }, - "node_modules/@tailwindcss/forms": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.2.1.tgz", - "integrity": "sha512-czfvEdY+J2Ogfd6RUSr/ZSUmDxTujr34M++YLnp2cCPC3oJ4kFvFMaRXA6cEXKw7F1hJuapdjXRjsXIEXGgORg==", - "dependencies": { - "mini-svg-data-uri": "^1.2.3" - } - }, - "node_modules/@types/codemirror": { - "version": "0.0.108", - "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.108.tgz", - "integrity": "sha512-3FGFcus0P7C2UOGCNUVENqObEb4SFk+S8Dnxq7K6aIsLVs/vDtlangl3PEO0ykaKXyK56swVF6Nho7VsA44uhw==", - "dev": true, - "dependencies": { - "@types/tern": "*" - } - }, - "node_modules/@types/estree": { - "version": "0.0.46", - "resolved": 
"https://registry.npmjs.org/@types/estree/-/estree-0.0.46.tgz", - "integrity": "sha512-laIjwTQaD+5DukBZaygQ79K1Z0jb1bPEMRrkXSLjtCcZm+abyp5YbrqpSLzD42FwWW6gK/aS4NYpJ804nG2brg==", - "dev": true - }, - "node_modules/@types/events": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz", - "integrity": "sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==", - "dev": true - }, - "node_modules/@types/history": { - "version": "4.7.8", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.8.tgz", - "integrity": "sha512-S78QIYirQcUoo6UJZx9CSP0O2ix9IaeAXwQi26Rhr/+mg7qqPy8TzaxHSUut7eGjL8WmLccT7/MXf304WjqHcA==", - "dev": true - }, - "node_modules/@types/luxon": { - "version": "1.25.1", - "resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-1.25.1.tgz", - "integrity": "sha512-enkMO4WJcbdkhK1eZrItF616buau02wtrSN+DDt9Qj9U23boSAXNJm0fMlgwpTDaRHq3S0D/SPIRbxy4YxBjiA==", - "dev": true - }, - "node_modules/@types/node": { - "version": "14.14.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.25.tgz", - "integrity": "sha512-EPpXLOVqDvisVxtlbvzfyqSsFeQxltFbluZNRndIb8tr9KiBnYNLzrc1N3pyKUCww2RNrfHDViqDWWE1LCJQtQ==", - "dev": true - }, - "node_modules/@types/prop-types": { - "version": "15.7.3", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.3.tgz", - "integrity": "sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw==", - "dev": true - }, - "node_modules/@types/react": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.1.tgz", - "integrity": "sha512-w8t9f53B2ei4jeOqf/gxtc2Sswnc3LBK5s0DyJcg5xd10tMHXts2N31cKjWfH9IC/JvEPa/YF1U4YeP1t4R6HQ==", - "dev": true, - "dependencies": { - "@types/prop-types": "*", - "csstype": "^3.0.2" - } - }, - "node_modules/@types/react-dom": { - "version": "17.0.0", - "resolved": 
"https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.0.tgz", - "integrity": "sha512-lUqY7OlkF/RbNtD5nIq7ot8NquXrdFrjSOR6+w9a9RFQevGi1oZO1dcJbXMeONAPKtZ2UrZOEJ5UOCVsxbLk/g==", - "dev": true, - "dependencies": { - "@types/react": "*" - } - }, - "node_modules/@types/react-router": { - "version": "5.1.11", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.11.tgz", - "integrity": "sha512-ofHbZMlp0Y2baOHgsWBQ4K3AttxY61bDMkwTiBOkPg7U6C/3UwwB5WaIx28JmSVi/eX3uFEMRo61BV22fDQIvg==", - "dev": true, - "dependencies": { - "@types/history": "*", - "@types/react": "*" - } - }, - "node_modules/@types/react-router-dom": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.1.7.tgz", - "integrity": "sha512-D5mHD6TbdV/DNHYsnwBTv+y73ei+mMjrkGrla86HthE4/PVvL1J94Bu3qABU+COXzpL23T1EZapVVpwHuBXiUg==", - "dev": true, - "dependencies": { - "@types/history": "*", - "@types/react": "*", - "@types/react-router": "*" - } - }, - "node_modules/@types/tern": { - "version": "0.23.3", - "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.3.tgz", - "integrity": "sha512-imDtS4TAoTcXk0g7u4kkWqedB3E4qpjXzCpD2LU5M5NAXHzCDsypyvXSaG7mM8DKYkCRa7tFp4tS/lp/Wo7Q3w==", - "dev": true, - "dependencies": { - "@types/estree": "*" - } - }, - "node_modules/@vitejs/plugin-react-refresh": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-refresh/-/plugin-react-refresh-1.2.2.tgz", - "integrity": "sha512-MEVSqncF/u1nvfeZsBJtPc3pLZWccN77CjY0itW7/Vji5BMmttW25a1kjSmooE+4JK4kaF3ElwF3LbV2kiVZWw==", - "dev": true, - "dependencies": { - "@babel/core": "^7.12.10", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "react-refresh": "^0.9.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": 
"sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "dependencies": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - } - }, - "node_modules/acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/autoprefixer": { - "version": "10.2.4", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.2.4.tgz", - "integrity": "sha512-DCCdUQiMD+P/as8m3XkeTUkUKuuRqLGcwD0nll7wevhqoJfMRpJlkFd1+MQh1pvupjiQuip42lc/VFvfUTMSKw==", - "dev": true, - "dependencies": { - "browserslist": "^4.16.1", - "caniuse-lite": "^1.0.30001181", - "colorette": "^1.2.1", - "fraction.js": "^4.0.13", - "normalize-range": "^0.1.2", - "postcss-value-parser": "^4.1.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - 
"node": "^10 || ^12 || >=14" - } - }, - "node_modules/balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/browserslist": { - "version": "4.16.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.3.tgz", - "integrity": "sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw==", - "dev": true, - "dependencies": { - "caniuse-lite": "^1.0.30001181", - "colorette": "^1.2.1", - "electron-to-chromium": "^1.3.649", - "escalade": "^3.1.1", - "node-releases": "^1.1.70" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/camelcase-css": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", - "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001185", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001185.tgz", - "integrity": "sha512-Fpi4kVNtNvJ15H0F6vwmXtb3tukv3Zg3qhKkOGUq7KJ1J6b9kf4dnNgtEAFXhRsJo0gNj9W60+wBvn0JcTvdTg==", - "dev": 
true - }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/codemirror": { - "version": "5.59.2", - "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.59.2.tgz", - "integrity": "sha512-/D5PcsKyzthtSy2NNKCyJi3b+htRkoKv3idswR/tR6UAvMNKA7SrmyZy6fOONJxSRs1JlUWEDAbxqfdArbK8iA==" - }, - "node_modules/color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/color/-/color-3.1.3.tgz", - "integrity": "sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==", - "dependencies": { - "color-convert": "^1.9.1", - "color-string": "^1.5.4" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "node_modules/color-string": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.4.tgz", - "integrity": "sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==", - "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "node_modules/colorette": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.1.tgz", - "integrity": 
"sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw==" - }, - "node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "node_modules/convert-source-map": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", - "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.1" - } - }, - "node_modules/css-unit-converter": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.2.tgz", - "integrity": "sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==" - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/csstype": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.6.tgz", - "integrity": "sha512-+ZAmfyWMT7TiIlzdqJgjMb7S4f1beorDbWbsocyK4RaiqA5RTX3K14bnBWmmA9QEM0gRdsjyyrEmcyga8Zsxmw==", - "dev": true - }, - "node_modules/debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "dev": true, 
- "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/defined": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", - "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" - }, - "node_modules/detective": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.0.tgz", - "integrity": "sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg==", - "dependencies": { - "acorn-node": "^1.6.1", - "defined": "^1.0.0", - "minimist": "^1.1.1" - }, - "bin": { - "detective": "bin/detective.js" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/didyoumean": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.1.tgz", - "integrity": "sha1-6S7f2tplN9SE1zwBcv0eugxJdv8=" - }, - "node_modules/electron-to-chromium": { - "version": "1.3.657", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.657.tgz", - "integrity": "sha512-/9ROOyvEflEbaZFUeGofD+Tqs/WynbSTbNgNF+/TJJxH1ePD/e6VjZlDJpW3FFFd3nj5l3Hd8ki2vRwy+gyRFw==", - "dev": true - }, - "node_modules/esbuild": { - "version": "0.8.42", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.8.42.tgz", - "integrity": "sha512-zUtj5RMqROCCCH0vV/a7cd8YQg8I0GWBhV3A3PklWRT+oM/YwVbnrtFnITzE1otGdnXplWHWdZ4OcYiV0PN+JQ==", - "dev": true, - "bin": { - "esbuild": "bin/esbuild" - } - }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "engines": 
{ - "node": ">=0.8.0" - } - }, - "node_modules/events": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.2.0.tgz", - "integrity": "sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg==", - "engines": { - "node": ">=0.8.x" - } - }, - "node_modules/fraction.js": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.0.13.tgz", - "integrity": "sha512-E1fz2Xs9ltlUp+qbiyx9wmt2n9dRzPsS11Jtdb8D2o+cC7wr9xkkKsVKJuBX0ST+LVS+LhLO+SbLJNtfWcJvXA==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dependencies": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "node_modules/fsevents": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", - "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.5.tgz", - "integrity": "sha512-kBBSQbz2K0Nyn+31j/w36fUfxkBW9/gfwRWdUY1ULReH3iokVJgddZAFcD1D0xlgTmFxJCbUkUclAlc6/IDJkw==" - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "engines": { - "node": ">=4" - } - }, - "node_modules/history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", - "dependencies": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - 
"resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" - } - }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "dependencies": { - "react-is": "^16.7.0" - } - }, - "node_modules/html-tags": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.1.0.tgz", - "integrity": "sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/indexes-of": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", - "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - }, - "node_modules/is-core-module": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", - "dependencies": { - "has": "^1.0.3" - } - }, - "node_modules/isarray": { - 
"version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/json-rpc-protocol": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/json-rpc-protocol/-/json-rpc-protocol-0.13.1.tgz", - "integrity": "sha512-gdb0TnNf0ITRjLm0QGNODgK1E6ORLhe+6L+RV8owg4X3d6x8jAfyHQC+xMG4T/qU2SPaNLpav29QRLC+3oF6gg==", - "dependencies": { - "make-error": "^1.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", - "dev": true, - "dependencies": { - "minimist": "^1.2.5" - }, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dependencies": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "node_modules/lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": 
"sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" - }, - "node_modules/lodash.toarray": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.toarray/-/lodash.toarray-4.4.0.tgz", - "integrity": "sha1-JMS/zWsvuji/0FlNsRedjptlZWE=" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/luxon": { - "version": "1.25.0", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-1.25.0.tgz", - "integrity": "sha512-hEgLurSH8kQRjY6i4YLey+mcKVAWXbDNlZRmM6AgWDJ1cY3atl8Ztf5wEY7VBReFbmGnwQPz7KYJblL8B2k0jQ==", - "engines": { - "node": "*" - } - }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" - }, - "node_modules/mini-create-react-context": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz", - "integrity": "sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==", - "dependencies": { - "@babel/runtime": "^7.12.1", - "tiny-warning": "^1.0.3" - } - }, - "node_modules/mini-svg-data-uri": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.2.3.tgz", - "integrity": "sha512-zd6KCAyXgmq6FV1mR10oKXYtvmA9vRoB6xPSTUJTbFApCtkefDnYueVR1gkof3KcdLZo1Y8mjF2DFmQMIxsHNQ==" - }, - "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": 
"sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, - "node_modules/modern-normalize": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/modern-normalize/-/modern-normalize-1.0.0.tgz", - "integrity": "sha512-1lM+BMLGuDfsdwf3rsgBSrxJwAZHFIrQ8YR61xIqdHo0uNKI9M52wNpHSrliZATJp51On6JD0AfRxd4YGSU0lw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/nanoid": { - "version": "3.1.20", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", - "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/node-emoji": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.10.0.tgz", - "integrity": "sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw==", - "dependencies": { - "lodash.toarray": "^4.4.0" - } - }, - "node_modules/node-releases": { - "version": "1.1.70", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.70.tgz", - "integrity": "sha512-Slf2s69+2/uAD79pVVQo8uSiC34+g8GWY8UH2Qtqv34ZfhYrxpYpfzs9Js9d6O0mbDmALuxaTlplnBTnSELcrw==", - "dev": true - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": 
"https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-hash": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.1.1.tgz", - "integrity": "sha512-VOJmgmS+7wvXf8CjbQmimtCnEx3IAoLxI3fp2fbWehxrWBcAQFbk+vcwb6vzR0VZv/eNCJ/27j151ZTwqW/JeQ==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" - }, - "node_modules/path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", - "dependencies": { - "isarray": "0.0.1" - } - }, - "node_modules/postcss": { - "version": "8.2.5", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.5.tgz", - "integrity": "sha512-wMcb7BpDcm3gxQOQx46NDNT36Kk0Ao6PJLLI2ed5vehbbbxCEuslSQzbQ2sfSKy+gkYxhWcGWSeaK+gwm4KIZg==", - "dependencies": { - "colorette": 
"^1.2.1", - "nanoid": "^3.1.20", - "source-map": "^0.6.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-functions": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/postcss-functions/-/postcss-functions-3.0.0.tgz", - "integrity": "sha1-DpTQFERwCkgd4g3k1V+yZAVkJQ4=", - "dependencies": { - "glob": "^7.1.2", - "object-assign": "^4.1.1", - "postcss": "^6.0.9", - "postcss-value-parser": "^3.3.0" - } - }, - "node_modules/postcss-functions/node_modules/postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dependencies": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/postcss-functions/node_modules/postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - }, - "node_modules/postcss-functions/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postcss-js": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-3.0.3.tgz", - "integrity": "sha512-gWnoWQXKFw65Hk/mi2+WTQTHdPD5UJdDXZmX073EY/B3BWnYjO4F4t0VneTCnCGQ5E5GsCdMkzPaTXwl3r5dJw==", - "dependencies": { - "camelcase-css": "^2.0.1", - "postcss": "^8.1.6" - }, - "engines": { - "node": ">=10.0" - } - }, - "node_modules/postcss-nested": { - "version": "5.0.3", - "resolved": 
"https://registry.npmjs.org/postcss-nested/-/postcss-nested-5.0.3.tgz", - "integrity": "sha512-R2LHPw+u5hFfDgJG748KpGbJyTv7Yr33/2tIMWxquYuHTd9EXu27PYnKi7BxMXLtzKC0a0WVsqHtd7qIluQu/g==", - "dependencies": { - "postcss-selector-parser": "^6.0.4" - }, - "engines": { - "node": ">=10.0" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz", - "integrity": "sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw==", - "dependencies": { - "cssesc": "^3.0.0", - "indexes-of": "^1.0.1", - "uniq": "^1.0.1", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", - "integrity": "sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==" - }, - "node_modules/postcss/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pretty-hrtime": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", - "integrity": "sha1-t+PqQkNaTJsnWdmeDyAesZWALuE=", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, - "node_modules/purgecss": { - "version": "3.1.3", - "resolved": 
"https://registry.npmjs.org/purgecss/-/purgecss-3.1.3.tgz", - "integrity": "sha512-hRSLN9mguJ2lzlIQtW4qmPS2kh6oMnA9RxdIYK8sz18QYqd6ePp4GNDl18oWHA1f2v2NEQIh51CO8s/E3YGckQ==", - "dependencies": { - "commander": "^6.0.0", - "glob": "^7.0.0", - "postcss": "^8.2.1", - "postcss-selector-parser": "^6.0.2" - }, - "bin": { - "purgecss": "bin/purgecss.js" - } - }, - "node_modules/react": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/react/-/react-17.0.1.tgz", - "integrity": "sha512-lG9c9UuMHdcAexXtigOZLX8exLWkW0Ku29qPRU8uhF2R9BN96dLCt0psvzPLlHc5OWkgymP3qwTRgbnw5BKx3w==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.1.tgz", - "integrity": "sha512-6eV150oJZ9U2t9svnsspTMrWNyHc6chX0KzDeAOXftRa8bNeOKTTfCJ7KorIwenkHd2xqVTBTCZd79yk/lx/Ug==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "scheduler": "^0.20.1" - } - }, - "node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "node_modules/react-refresh": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.9.0.tgz", - "integrity": "sha512-Gvzk7OZpiqKSkxsQvO/mbTN1poglhmAV7gR/DdIrRrSMXraRQQlfikRJOr3Nb9GTMPC5kof948Zy6jJZIFtDvQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-router": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.2.0.tgz", - "integrity": "sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==", - "dependencies": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "hoist-non-react-statics": "^3.1.0", - 
"loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.2", - "react-is": "^16.6.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - } - }, - "node_modules/react-router-dom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.2.0.tgz", - "integrity": "sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==", - "dependencies": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "loose-envify": "^1.3.1", - "prop-types": "^15.6.2", - "react-router": "5.2.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - } - }, - "node_modules/reduce-css-calc": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz", - "integrity": "sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==", - "dependencies": { - "css-unit-converter": "^1.1.1", - "postcss-value-parser": "^3.3.0" - } - }, - "node_modules/reduce-css-calc/node_modules/postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - }, - "node_modules/regenerator-runtime": { - "version": "0.13.7", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz", - "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==" - }, - "node_modules/resolve": { - "version": "1.19.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", - "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", - "dependencies": { - "is-core-module": "^2.1.0", - "path-parse": "^1.0.6" - } - }, - "node_modules/resolve-pathname": 
{ - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", - "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" - }, - "node_modules/rollup": { - "version": "2.38.5", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.38.5.tgz", - "integrity": "sha512-VoWt8DysFGDVRGWuHTqZzT02J0ASgjVq/hPs9QcBOGMd7B+jfTr/iqMVEyOi901rE3xq+Deq66GzIT1yt7sGwQ==", - "dev": true, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=10.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.1" - } - }, - "node_modules/rollup/node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/scheduler": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.1.tgz", - "integrity": "sha512-LKTe+2xNJBNxu/QhHvDR14wUXHRQbVY5ZOYpOGWRzhydZUqrLb2JBvLPY7cAqFmqrWuDED0Mjk7013SZiOz6Bw==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": 
"https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, - "node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/tailwindcss": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-2.0.2.tgz", - "integrity": "sha512-nO9JRE1pO7SF9RnYAl6g7uzeHdrmKAFqNjT9NtZUfxqimJZAOOLOEyIEUiMq12+xIc7mC2Ey3Vf90XjHpWKfbw==", - "dependencies": { - "@fullhuman/postcss-purgecss": "^3.0.0", - "bytes": "^3.0.0", - "chalk": "^4.1.0", - "color": "^3.1.3", - "detective": "^5.2.0", - "didyoumean": "^1.2.1", - "fs-extra": "^9.0.1", - "html-tags": "^3.1.0", - "lodash": "^4.17.20", - "modern-normalize": "^1.0.0", - "node-emoji": "^1.8.1", - "object-hash": "^2.0.3", - "postcss-functions": "^3", - "postcss-js": "^3.0.3", - "postcss-nested": "^5.0.1", - "postcss-selector-parser": "^6.0.4", - "postcss-value-parser": "^4.1.0", - "pretty-hrtime": "^1.0.3", - "reduce-css-calc": "^2.1.6", - "resolve": "^1.19.0" - }, - "bin": { - "tailwind": "lib/cli.js", - "tailwindcss": "lib/cli.js" - }, - "engines": { - "node": ">=12.13.0" - } - }, - "node_modules/tailwindcss/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - 
"dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tailwindcss/node_modules/chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tailwindcss/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/tailwindcss/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/tailwindcss/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/tailwindcss/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tiny-invariant": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", - "integrity": 
"sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" - }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" - }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/typescript": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.3.tgz", - "integrity": "sha512-B3ZIOf1IKeH2ixgHhj6la6xdwR9QrLC5d1VKeCSY4tvkqhF2eqd9O7txNlS0PO3GrBAFIdr3L1ndNwteUbZLYg==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" - }, - "node_modules/universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "node_modules/value-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", - "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" - }, - "node_modules/vite": { - "version": "2.0.0-beta.65", - "resolved": 
"https://registry.npmjs.org/vite/-/vite-2.0.0-beta.65.tgz", - "integrity": "sha512-mdHNTP6fGeb8m8lWAM3UbSPw1+un1lUv0i4MQJcNiK2/P01RHIY02VjQeXBv3NemkExkgLji88LN9ySFMUQpIw==", - "dev": true, - "dependencies": { - "esbuild": "^0.8.34", - "postcss": "^8.2.1", - "resolve": "^1.19.0", - "rollup": "^2.35.1" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": ">=12.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.1.2" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "engines": { - "node": ">=0.4" - } - } - }, - "dependencies": { - "@babel/code-frame": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", - "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", - "dev": true, - "requires": { - "@babel/highlight": "^7.12.13" - } - }, - "@babel/core": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.13.tgz", - "integrity": "sha512-BQKE9kXkPlXHPeqissfxo0lySWJcYdEP0hdtJOH/iJfDdhOCcgtNCjftCJg3qqauB4h+lz2N6ixM++b9DN1Tcw==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-module-transforms": "^7.12.13", - "@babel/helpers": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", - "json5": "^2.1.2", - "lodash": "^4.17.19", - "semver": "^5.4.1", - "source-map": "^0.5.0" - } - }, - "@babel/generator": { - 
"version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.15.tgz", - "integrity": "sha512-6F2xHxBiFXWNSGb7vyCUTBF8RCLY66rS0zEPcP8t/nQyXjha5EuK4z7H5o7fWG8B4M7y6mqVWq1J+1PuwRhecQ==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13", - "jsesc": "^2.5.1", - "source-map": "^0.5.0" - } - }, - "@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", - "dev": true, - "requires": { - "@babel/helper-get-function-arity": "^7.12.13", - "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-get-function-arity": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz", - "integrity": "sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-member-expression-to-functions": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.13.tgz", - "integrity": "sha512-B+7nN0gIL8FZ8SvMcF+EPyB21KnCcZHQZFczCxbiNGV/O0rsrSBlWGLzmtBJ3GMjSVMIm4lpFhR+VdVBuIsUcQ==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-module-imports": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz", - "integrity": "sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-module-transforms": { - "version": "7.12.13", - "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz", - "integrity": "sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA==", - "dev": true, - "requires": { - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-replace-supers": "^7.12.13", - "@babel/helper-simple-access": "^7.12.13", - "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - "lodash": "^4.17.19" - } - }, - "@babel/helper-optimise-call-expression": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz", - "integrity": "sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-plugin-utils": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz", - "integrity": "sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==", - "dev": true - }, - "@babel/helper-replace-supers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz", - "integrity": "sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==", - "dev": true, - "requires": { - "@babel/helper-member-expression-to-functions": "^7.12.13", - "@babel/helper-optimise-call-expression": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-simple-access": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz", - "integrity": 
"sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-split-export-declaration": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz", - "integrity": "sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==", - "dev": true, - "requires": { - "@babel/types": "^7.12.13" - } - }, - "@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", - "dev": true - }, - "@babel/helpers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.13.tgz", - "integrity": "sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ==", - "dev": true, - "requires": { - "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "@babel/highlight": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", - "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", - "dev": true, - "requires": { - "@babel/helper-validator-identifier": "^7.12.11", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - } - }, - "@babel/parser": { - "version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.15.tgz", - "integrity": "sha512-AQBOU2Z9kWwSZMd6lNjCX0GUgFonL1wAM1db8L8PMk9UDaGsRCArBkU4Sc+UCM3AE4hjbXx+h58Lb3QT4oRmrA==", - "dev": true - }, - "@babel/plugin-syntax-import-meta": { - "version": "7.10.4", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", - "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", - "dev": true, - "requires": { - "@babel/helper-plugin-utils": "^7.10.4" - } - }, - "@babel/runtime": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.13.tgz", - "integrity": "sha512-8+3UMPBrjFa/6TtKi/7sehPKqfAm4g6K+YQjyyFOLUTxzOngcRZTlAVY8sc2CORJYqdHQY8gRPHmn+qo15rCBw==", - "requires": { - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/template": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.13.tgz", - "integrity": "sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13" - } - }, - "@babel/traverse": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.13.tgz", - "integrity": "sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-function-name": "^7.12.13", - "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" - } - }, - "@babel/types": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.13.tgz", - "integrity": "sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ==", - "dev": true, - "requires": { - "@babel/helper-validator-identifier": "^7.12.11", - "lodash": "^4.17.19", - "to-fast-properties": "^2.0.0" - } - }, - "@fullhuman/postcss-purgecss": { - "version": 
"3.1.3", - "resolved": "https://registry.npmjs.org/@fullhuman/postcss-purgecss/-/postcss-purgecss-3.1.3.tgz", - "integrity": "sha512-kwOXw8fZ0Lt1QmeOOrd+o4Ibvp4UTEBFQbzvWldjlKv5n+G9sXfIPn1hh63IQIL8K8vbvv1oYMJiIUbuy9bGaA==", - "requires": { - "purgecss": "^3.1.3" - } - }, - "@headlessui/react": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-0.2.0.tgz", - "integrity": "sha512-YV+vF+QhTRcspydPdHF3ZXe+FkOiJpRdqMjjFIIX9bSdT2O2T7GurgKQdGgamNUM+B99MZBOTRqxS8Dlh485eg==" - }, - "@tailwindcss/forms": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.2.1.tgz", - "integrity": "sha512-czfvEdY+J2Ogfd6RUSr/ZSUmDxTujr34M++YLnp2cCPC3oJ4kFvFMaRXA6cEXKw7F1hJuapdjXRjsXIEXGgORg==", - "requires": { - "mini-svg-data-uri": "^1.2.3" - } - }, - "@types/codemirror": { - "version": "0.0.108", - "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.108.tgz", - "integrity": "sha512-3FGFcus0P7C2UOGCNUVENqObEb4SFk+S8Dnxq7K6aIsLVs/vDtlangl3PEO0ykaKXyK56swVF6Nho7VsA44uhw==", - "dev": true, - "requires": { - "@types/tern": "*" - } - }, - "@types/estree": { - "version": "0.0.46", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.46.tgz", - "integrity": "sha512-laIjwTQaD+5DukBZaygQ79K1Z0jb1bPEMRrkXSLjtCcZm+abyp5YbrqpSLzD42FwWW6gK/aS4NYpJ804nG2brg==", - "dev": true - }, - "@types/events": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz", - "integrity": "sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==", - "dev": true - }, - "@types/history": { - "version": "4.7.8", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.8.tgz", - "integrity": "sha512-S78QIYirQcUoo6UJZx9CSP0O2ix9IaeAXwQi26Rhr/+mg7qqPy8TzaxHSUut7eGjL8WmLccT7/MXf304WjqHcA==", - "dev": true - }, - "@types/luxon": { - "version": "1.25.1", - "resolved": 
"https://registry.npmjs.org/@types/luxon/-/luxon-1.25.1.tgz", - "integrity": "sha512-enkMO4WJcbdkhK1eZrItF616buau02wtrSN+DDt9Qj9U23boSAXNJm0fMlgwpTDaRHq3S0D/SPIRbxy4YxBjiA==", - "dev": true - }, - "@types/node": { - "version": "14.14.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.25.tgz", - "integrity": "sha512-EPpXLOVqDvisVxtlbvzfyqSsFeQxltFbluZNRndIb8tr9KiBnYNLzrc1N3pyKUCww2RNrfHDViqDWWE1LCJQtQ==", - "dev": true - }, - "@types/prop-types": { - "version": "15.7.3", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.3.tgz", - "integrity": "sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw==", - "dev": true - }, - "@types/react": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.1.tgz", - "integrity": "sha512-w8t9f53B2ei4jeOqf/gxtc2Sswnc3LBK5s0DyJcg5xd10tMHXts2N31cKjWfH9IC/JvEPa/YF1U4YeP1t4R6HQ==", - "dev": true, - "requires": { - "@types/prop-types": "*", - "csstype": "^3.0.2" - } - }, - "@types/react-dom": { - "version": "17.0.0", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.0.tgz", - "integrity": "sha512-lUqY7OlkF/RbNtD5nIq7ot8NquXrdFrjSOR6+w9a9RFQevGi1oZO1dcJbXMeONAPKtZ2UrZOEJ5UOCVsxbLk/g==", - "dev": true, - "requires": { - "@types/react": "*" - } - }, - "@types/react-router": { - "version": "5.1.11", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.11.tgz", - "integrity": "sha512-ofHbZMlp0Y2baOHgsWBQ4K3AttxY61bDMkwTiBOkPg7U6C/3UwwB5WaIx28JmSVi/eX3uFEMRo61BV22fDQIvg==", - "dev": true, - "requires": { - "@types/history": "*", - "@types/react": "*" - } - }, - "@types/react-router-dom": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.1.7.tgz", - "integrity": "sha512-D5mHD6TbdV/DNHYsnwBTv+y73ei+mMjrkGrla86HthE4/PVvL1J94Bu3qABU+COXzpL23T1EZapVVpwHuBXiUg==", - "dev": true, - "requires": { - 
"@types/history": "*", - "@types/react": "*", - "@types/react-router": "*" - } - }, - "@types/tern": { - "version": "0.23.3", - "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.3.tgz", - "integrity": "sha512-imDtS4TAoTcXk0g7u4kkWqedB3E4qpjXzCpD2LU5M5NAXHzCDsypyvXSaG7mM8DKYkCRa7tFp4tS/lp/Wo7Q3w==", - "dev": true, - "requires": { - "@types/estree": "*" - } - }, - "@vitejs/plugin-react-refresh": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-refresh/-/plugin-react-refresh-1.2.2.tgz", - "integrity": "sha512-MEVSqncF/u1nvfeZsBJtPc3pLZWccN77CjY0itW7/Vji5BMmttW25a1kjSmooE+4JK4kaF3ElwF3LbV2kiVZWw==", - "dev": true, - "requires": { - "@babel/core": "^7.12.10", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "react-refresh": "^0.9.0" - } - }, - "acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" - }, - "acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "requires": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - } - }, - "acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==" - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - 
"integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" - }, - "autoprefixer": { - "version": "10.2.4", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.2.4.tgz", - "integrity": "sha512-DCCdUQiMD+P/as8m3XkeTUkUKuuRqLGcwD0nll7wevhqoJfMRpJlkFd1+MQh1pvupjiQuip42lc/VFvfUTMSKw==", - "dev": true, - "requires": { - "browserslist": "^4.16.1", - "caniuse-lite": "^1.0.30001181", - "colorette": "^1.2.1", - "fraction.js": "^4.0.13", - "normalize-range": "^0.1.2", - "postcss-value-parser": "^4.1.0" - } - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "browserslist": { - "version": "4.16.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.3.tgz", - "integrity": "sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw==", - "dev": true, - "requires": { - "caniuse-lite": "^1.0.30001181", - "colorette": "^1.2.1", - "electron-to-chromium": "^1.3.649", - "escalade": "^3.1.1", - "node-releases": "^1.1.70" - } - }, - "bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" - }, - "camelcase-css": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", - "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" - }, - 
"caniuse-lite": { - "version": "1.0.30001185", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001185.tgz", - "integrity": "sha512-Fpi4kVNtNvJ15H0F6vwmXtb3tukv3Zg3qhKkOGUq7KJ1J6b9kf4dnNgtEAFXhRsJo0gNj9W60+wBvn0JcTvdTg==", - "dev": true - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "codemirror": { - "version": "5.59.2", - "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.59.2.tgz", - "integrity": "sha512-/D5PcsKyzthtSy2NNKCyJi3b+htRkoKv3idswR/tR6UAvMNKA7SrmyZy6fOONJxSRs1JlUWEDAbxqfdArbK8iA==" - }, - "color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/color/-/color-3.1.3.tgz", - "integrity": "sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==", - "requires": { - "color-convert": "^1.9.1", - "color-string": "^1.5.4" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "color-string": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.4.tgz", - "integrity": "sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==", - "requires": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "colorette": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/colorette/-/colorette-1.2.1.tgz", - "integrity": "sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw==" - }, - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==" - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "convert-source-map": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", - "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.1" - } - }, - "css-unit-converter": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.2.tgz", - "integrity": "sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==" - }, - "cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" - }, - "csstype": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.6.tgz", - "integrity": "sha512-+ZAmfyWMT7TiIlzdqJgjMb7S4f1beorDbWbsocyK4RaiqA5RTX3K14bnBWmmA9QEM0gRdsjyyrEmcyga8Zsxmw==", - "dev": true - }, - "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "defined": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", - "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" - }, - "detective": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.0.tgz", - "integrity": "sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg==", - "requires": { - "acorn-node": "^1.6.1", - "defined": "^1.0.0", - "minimist": "^1.1.1" - } - }, - "didyoumean": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.1.tgz", - "integrity": "sha1-6S7f2tplN9SE1zwBcv0eugxJdv8=" - }, - "electron-to-chromium": { - "version": "1.3.657", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.657.tgz", - "integrity": "sha512-/9ROOyvEflEbaZFUeGofD+Tqs/WynbSTbNgNF+/TJJxH1ePD/e6VjZlDJpW3FFFd3nj5l3Hd8ki2vRwy+gyRFw==", - "dev": true - }, - "esbuild": { - "version": "0.8.42", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.8.42.tgz", - "integrity": "sha512-zUtj5RMqROCCCH0vV/a7cd8YQg8I0GWBhV3A3PklWRT+oM/YwVbnrtFnITzE1otGdnXplWHWdZ4OcYiV0PN+JQ==", - "dev": true - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "events": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.2.0.tgz", - "integrity": "sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg==" - }, - "fraction.js": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.0.13.tgz", - "integrity": 
"sha512-E1fz2Xs9ltlUp+qbiyx9wmt2n9dRzPsS11Jtdb8D2o+cC7wr9xkkKsVKJuBX0ST+LVS+LhLO+SbLJNtfWcJvXA==", - "dev": true - }, - "fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "requires": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "fsevents": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", - "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", - "dev": true, - "optional": true - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": 
"sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, - "graceful-fs": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.5.tgz", - "integrity": "sha512-kBBSQbz2K0Nyn+31j/w36fUfxkBW9/gfwRWdUY1ULReH3iokVJgddZAFcD1D0xlgTmFxJCbUkUclAlc6/IDJkw==" - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", - "requires": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" - } - }, - "hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "requires": { - "react-is": "^16.7.0" - } - }, - "html-tags": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.1.0.tgz", - "integrity": "sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==" - }, - "indexes-of": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", - "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" - }, - "inflight": { - "version": "1.0.6", - "resolved": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - }, - "is-core-module": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", - "requires": { - "has": "^1.0.3" - } - }, - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true - }, - "json-rpc-protocol": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/json-rpc-protocol/-/json-rpc-protocol-0.13.1.tgz", - "integrity": "sha512-gdb0TnNf0ITRjLm0QGNODgK1E6ORLhe+6L+RV8owg4X3d6x8jAfyHQC+xMG4T/qU2SPaNLpav29QRLC+3oF6gg==", - "requires": { - "make-error": "^1.3.0" - } - }, - "json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": 
"sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" - }, - "lodash.toarray": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.toarray/-/lodash.toarray-4.4.0.tgz", - "integrity": "sha1-JMS/zWsvuji/0FlNsRedjptlZWE=" - }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, - "luxon": { - "version": "1.25.0", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-1.25.0.tgz", - "integrity": "sha512-hEgLurSH8kQRjY6i4YLey+mcKVAWXbDNlZRmM6AgWDJ1cY3atl8Ztf5wEY7VBReFbmGnwQPz7KYJblL8B2k0jQ==" - }, - "make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" - }, - "mini-create-react-context": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz", - "integrity": "sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==", - "requires": { - "@babel/runtime": "^7.12.1", - "tiny-warning": "^1.0.3" - } - }, - 
"mini-svg-data-uri": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.2.3.tgz", - "integrity": "sha512-zd6KCAyXgmq6FV1mR10oKXYtvmA9vRoB6xPSTUJTbFApCtkefDnYueVR1gkof3KcdLZo1Y8mjF2DFmQMIxsHNQ==" - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, - "modern-normalize": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/modern-normalize/-/modern-normalize-1.0.0.tgz", - "integrity": "sha512-1lM+BMLGuDfsdwf3rsgBSrxJwAZHFIrQ8YR61xIqdHo0uNKI9M52wNpHSrliZATJp51On6JD0AfRxd4YGSU0lw==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "nanoid": { - "version": "3.1.20", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", - "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==" - }, - "node-emoji": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.10.0.tgz", - "integrity": "sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw==", - "requires": { - "lodash.toarray": "^4.4.0" - } - }, - "node-releases": { - "version": "1.1.70", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.70.tgz", - "integrity": 
"sha512-Slf2s69+2/uAD79pVVQo8uSiC34+g8GWY8UH2Qtqv34ZfhYrxpYpfzs9Js9d6O0mbDmALuxaTlplnBTnSELcrw==", - "dev": true - }, - "normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" - }, - "object-hash": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.1.1.tgz", - "integrity": "sha512-VOJmgmS+7wvXf8CjbQmimtCnEx3IAoLxI3fp2fbWehxrWBcAQFbk+vcwb6vzR0VZv/eNCJ/27j151ZTwqW/JeQ==" - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { - "wrappy": "1" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" - }, - "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" - }, - "path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", - "requires": { - "isarray": "0.0.1" - } - }, - "postcss": { - "version": "8.2.5", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.5.tgz", - "integrity": "sha512-wMcb7BpDcm3gxQOQx46NDNT36Kk0Ao6PJLLI2ed5vehbbbxCEuslSQzbQ2sfSKy+gkYxhWcGWSeaK+gwm4KIZg==", - "requires": { - "colorette": "^1.2.1", - "nanoid": "^3.1.20", - "source-map": "^0.6.1" - }, - "dependencies": { - 
"source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - } - } - }, - "postcss-functions": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/postcss-functions/-/postcss-functions-3.0.0.tgz", - "integrity": "sha1-DpTQFERwCkgd4g3k1V+yZAVkJQ4=", - "requires": { - "glob": "^7.1.2", - "object-assign": "^4.1.1", - "postcss": "^6.0.9", - "postcss-value-parser": "^3.3.0" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - } - } - }, - "postcss-js": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-3.0.3.tgz", - "integrity": "sha512-gWnoWQXKFw65Hk/mi2+WTQTHdPD5UJdDXZmX073EY/B3BWnYjO4F4t0VneTCnCGQ5E5GsCdMkzPaTXwl3r5dJw==", - "requires": { - "camelcase-css": "^2.0.1", - "postcss": "^8.1.6" - } - }, - "postcss-nested": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-5.0.3.tgz", - "integrity": "sha512-R2LHPw+u5hFfDgJG748KpGbJyTv7Yr33/2tIMWxquYuHTd9EXu27PYnKi7BxMXLtzKC0a0WVsqHtd7qIluQu/g==", - "requires": { - 
"postcss-selector-parser": "^6.0.4" - } - }, - "postcss-selector-parser": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz", - "integrity": "sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw==", - "requires": { - "cssesc": "^3.0.0", - "indexes-of": "^1.0.1", - "uniq": "^1.0.1", - "util-deprecate": "^1.0.2" - } - }, - "postcss-value-parser": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", - "integrity": "sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==" - }, - "pretty-hrtime": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", - "integrity": "sha1-t+PqQkNaTJsnWdmeDyAesZWALuE=" - }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, - "purgecss": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/purgecss/-/purgecss-3.1.3.tgz", - "integrity": "sha512-hRSLN9mguJ2lzlIQtW4qmPS2kh6oMnA9RxdIYK8sz18QYqd6ePp4GNDl18oWHA1f2v2NEQIh51CO8s/E3YGckQ==", - "requires": { - "commander": "^6.0.0", - "glob": "^7.0.0", - "postcss": "^8.2.1", - "postcss-selector-parser": "^6.0.2" - } - }, - "react": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/react/-/react-17.0.1.tgz", - "integrity": "sha512-lG9c9UuMHdcAexXtigOZLX8exLWkW0Ku29qPRU8uhF2R9BN96dLCt0psvzPLlHc5OWkgymP3qwTRgbnw5BKx3w==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "react-dom": { - "version": "17.0.1", - "resolved": 
"https://registry.npmjs.org/react-dom/-/react-dom-17.0.1.tgz", - "integrity": "sha512-6eV150oJZ9U2t9svnsspTMrWNyHc6chX0KzDeAOXftRa8bNeOKTTfCJ7KorIwenkHd2xqVTBTCZd79yk/lx/Ug==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "scheduler": "^0.20.1" - } - }, - "react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "react-refresh": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.9.0.tgz", - "integrity": "sha512-Gvzk7OZpiqKSkxsQvO/mbTN1poglhmAV7gR/DdIrRrSMXraRQQlfikRJOr3Nb9GTMPC5kof948Zy6jJZIFtDvQ==", - "dev": true - }, - "react-router": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.2.0.tgz", - "integrity": "sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==", - "requires": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "hoist-non-react-statics": "^3.1.0", - "loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.2", - "react-is": "^16.6.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - } - }, - "react-router-dom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.2.0.tgz", - "integrity": "sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==", - "requires": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "loose-envify": "^1.3.1", - "prop-types": "^15.6.2", - "react-router": "5.2.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - } - }, - "reduce-css-calc": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz", - "integrity": 
"sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==", - "requires": { - "css-unit-converter": "^1.1.1", - "postcss-value-parser": "^3.3.0" - }, - "dependencies": { - "postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - } - } - }, - "regenerator-runtime": { - "version": "0.13.7", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz", - "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==" - }, - "resolve": { - "version": "1.19.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", - "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", - "requires": { - "is-core-module": "^2.1.0", - "path-parse": "^1.0.6" - } - }, - "resolve-pathname": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", - "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" - }, - "rollup": { - "version": "2.38.5", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.38.5.tgz", - "integrity": "sha512-VoWt8DysFGDVRGWuHTqZzT02J0ASgjVq/hPs9QcBOGMd7B+jfTr/iqMVEyOi901rE3xq+Deq66GzIT1yt7sGwQ==", - "dev": true, - "requires": { - "fsevents": "~2.3.1" - }, - "dependencies": { - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true - } - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "scheduler": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.1.tgz", - "integrity": "sha512-LKTe+2xNJBNxu/QhHvDR14wUXHRQbVY5ZOYpOGWRzhydZUqrLb2JBvLPY7cAqFmqrWuDED0Mjk7013SZiOz6Bw==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "requires": { - "is-arrayish": "^0.3.1" - } - }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "requires": { - "has-flag": "^3.0.0" - } - }, - "tailwindcss": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-2.0.2.tgz", - "integrity": "sha512-nO9JRE1pO7SF9RnYAl6g7uzeHdrmKAFqNjT9NtZUfxqimJZAOOLOEyIEUiMq12+xIc7mC2Ey3Vf90XjHpWKfbw==", - "requires": { - "@fullhuman/postcss-purgecss": "^3.0.0", - "bytes": "^3.0.0", - "chalk": "^4.1.0", - "color": "^3.1.3", - "detective": "^5.2.0", - "didyoumean": "^1.2.1", - "fs-extra": "^9.0.1", - "html-tags": "^3.1.0", - "lodash": "^4.17.20", - "modern-normalize": "^1.0.0", - "node-emoji": 
"^1.8.1", - "object-hash": "^2.0.3", - "postcss-functions": "^3", - "postcss-js": "^3.0.3", - "postcss-nested": "^5.0.1", - "postcss-selector-parser": "^6.0.4", - "postcss-value-parser": "^4.1.0", - "pretty-hrtime": "^1.0.3", - "reduce-css-calc": "^2.1.6", - "resolve": "^1.19.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "tiny-invariant": { - "version": "1.1.0", - 
"resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", - "integrity": "sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" - }, - "tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" - }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true - }, - "typescript": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.3.tgz", - "integrity": "sha512-B3ZIOf1IKeH2ixgHhj6la6xdwR9QrLC5d1VKeCSY4tvkqhF2eqd9O7txNlS0PO3GrBAFIdr3L1ndNwteUbZLYg==", - "dev": true - }, - "uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "value-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", - "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" - }, - "vite": { - "version": "2.0.0-beta.65", - "resolved": "https://registry.npmjs.org/vite/-/vite-2.0.0-beta.65.tgz", - "integrity": "sha512-mdHNTP6fGeb8m8lWAM3UbSPw1+un1lUv0i4MQJcNiK2/P01RHIY02VjQeXBv3NemkExkgLji88LN9ySFMUQpIw==", - "dev": true, - "requires": { - 
"esbuild": "^0.8.34", - "fsevents": "~2.1.2", - "postcss": "^8.2.1", - "resolve": "^1.19.0", - "rollup": "^2.35.1" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" - } - } -} diff --git a/cli/daemon/dash/dashapp/package.json b/cli/daemon/dash/dashapp/package.json deleted file mode 100644 index 872e85d9db..0000000000 --- a/cli/daemon/dash/dashapp/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "dashapp", - "version": "0.0.0", - "scripts": { - "dev": "vite", - "build": "tsc && vite build", - "serve": "vite preview" - }, - "dependencies": { - "@headlessui/react": "^0.2.0", - "@tailwindcss/forms": "^0.2.1", - "codemirror": "^5.59.2", - "events": "^3.2.0", - "json-rpc-protocol": "^0.13.1", - "luxon": "^1.25.0", - "react": "^17.0.0", - "react-dom": "^17.0.0", - "react-router-dom": "^5.2.0", - "tailwindcss": "^2.0.2" - }, - "devDependencies": { - "@types/codemirror": "^0.0.108", - "@types/events": "^3.0.0", - "@types/luxon": "^1.25.1", - "@types/node": "^14.14.25", - "@types/react": "^17.0.0", - "@types/react-dom": "^17.0.0", - "@types/react-router-dom": "^5.1.7", - "@vitejs/plugin-react-refresh": "^1.1.0", - "autoprefixer": "^10.2.4", - "postcss": "^8.2.5", - "typescript": "^4.1.2", - "vite": "^2.0.0-beta.64" - } -} diff --git a/cli/daemon/dash/dashapp/postcss.config.js b/cli/daemon/dash/dashapp/postcss.config.js deleted file mode 100644 index 33ad091d26..0000000000 --- a/cli/daemon/dash/dashapp/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -} diff --git a/cli/daemon/dash/dashapp/public/favicon.ico b/cli/daemon/dash/dashapp/public/favicon.ico deleted 
file mode 100644 index f8a3d8683e..0000000000 Binary files a/cli/daemon/dash/dashapp/public/favicon.ico and /dev/null differ diff --git a/cli/daemon/dash/dashapp/src/App.tsx b/cli/daemon/dash/dashapp/src/App.tsx deleted file mode 100644 index 6f449b0844..0000000000 --- a/cli/daemon/dash/dashapp/src/App.tsx +++ /dev/null @@ -1,42 +0,0 @@ -import React, { useEffect, useRef, useState } from 'react' -import { - BrowserRouter as Router, - Route, Switch -} from "react-router-dom"; -import Client from '~lib/client/client'; -import JSONRPCConn from '~lib/client/jsonrpc'; -import AppList from '~p/AppList'; -import AppHome from '~p/AppHome'; -import { ConnContext } from '~lib/ctx'; -import AppAPI from '~p/AppAPI'; - -function App() { - const [conn, setConn] = useState(undefined) - const [err, setErr] = useState(undefined) - const mounted = useRef(true) - - useEffect(() => { - const client = new Client() - client.base.jsonrpc("/__encore").then( - conn => mounted.current && setConn(conn) - ).catch(err => mounted.current && setErr(err)) - return () => { conn?.close(); mounted.current = false } - }, []) - - if (err) return
Error: {err.message}
- if (!conn) return
Loading...
- - return ( - - - - - - - - - - ) -} - -export default App diff --git a/cli/daemon/dash/dashapp/src/components/Button.tsx b/cli/daemon/dash/dashapp/src/components/Button.tsx deleted file mode 100644 index 9369e961eb..0000000000 --- a/cli/daemon/dash/dashapp/src/components/Button.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import React, { FunctionComponent, MouseEventHandler } from "react"; - -export interface Props { - theme: "purple" | "purple:secondary" | "purple:border" | "white" | "red" | "red:secondary" | "gray" | "gray:border"; - size: "xxs" | "xs" | "sm" | "md" | "lg" | "xl"; - cls?: string; - disabled?: boolean; - onClick?: MouseEventHandler; - type?: "button" | "submit"; -} - -const sizeClasses = { - "xxs": "px-1 py-0.5 text-xs leading-4 rounded", - "xs": "px-2.5 py-1.5 text-xs leading-4 rounded", - "sm": "px-3 py-2 text-sm leading-4 rounded-md", - "md": "px-4 py-2 text-sm leading-5 rounded-md", - "lg": "px-4 py-2 text-base leading-6 rounded-md", - "xl": "px-6 py-3 text-base leading-6 rounded-md", -} - -const enabledClasses = { - "purple": "border-transparent text-white bg-purple-600 hover:bg-purple-500 focus:outline-none focus:border-purple-700 focus:shadow-outline-purple active:bg-purple-700", - "purple:secondary": "border-transparent text-purple-700 bg-purple-100 hover:bg-purple-50 focus:outline-none focus:border-purple-300 focus:shadow-outline-purple active:bg-purple-200", - "purple:border": "border-purple-600 text-purple-700 bg-white hover:text-purple-500 hover:bg-purple-50 focus:outline-none focus:border-purple-500 focus:shadow-outline-purple active:text-purple-800 active:bg-gray-50", - "white": "border-gray-300 text-gray-700 bg-white hover:text-gray-500 focus:outline-none focus:border-purple-300 focus:shadow-outline-purple active:text-gray-800 active:bg-gray-50", - "red": "border-transparent bg-red-600 text-white hover:bg-red-500 focus:outline-none focus:border-red-700 focus:shadow-outline-red active:bg-red-700", - "red:secondary": "border-red-600 
text-red-700 bg-white hover:text-white hover:bg-red-600 focus:outline-none focus:border-red-500 focus:shadow-outline-red active:text-white active:bg-red-600", - "gray": "border-transparent text-white bg-gray-700 hover:bg-gray-600 focus:outline-none active:bg-gray-800", - "gray:border": "border-gray-700 text-gray-800 bg-white hover:text-gray-600 hover:bg-gray-50 focus:outline-none focus:border-gray-600 active:text-gray-800 active:bg-gray-50", -} - -const disabledClasses = { - "purple": "border-transparent text-white bg-purple-500 opacity-50 cursor-not-allowed focus:outline-none", - "purple:secondary": "border-transparent text-purple-700 bg-purple-100 opacity-50 cursor-not-allowed focus:outline-none", - "purple:border": "border-gray-200 text-gray-600 bg-white opacity-50 cursor-not-allowed focus:outline-none", - "white": "border-gray-200 text-gray-600 bg-white opacity-50 cursor-not-allowed focus:outline-none", - "red": "border-transparent text-white bg-red-500 opacity-50 cursor-not-allowed focus:outline-none", - "red:secondary": "border-gray-200 text-red-800 bg-white opacity-50 cursor-not-allowed focus:outline-none", - "gray": "border-transparent text-white bg-gray-500 opacity-50 cursor-not-allowed focus:outline-none", - "gray:border": "border-gray-200 text-gray-600 bg-white opacity-50 cursor-not-allowed focus:outline-none", -} - -const Button: FunctionComponent = (props) => { - const baseCls = "inline-flex justify-center items-center border font-medium transition duration-150 ease-in-out" - const cls = `${baseCls} ${props.disabled ? 
disabledClasses[props.theme] : enabledClasses[props.theme]} ${sizeClasses[props.size]} ${props.cls || ""}` - return -} - -export default Button \ No newline at end of file diff --git a/cli/daemon/dash/dashapp/src/components/Input.tsx b/cli/daemon/dash/dashapp/src/components/Input.tsx deleted file mode 100644 index 4e5a050b7a..0000000000 --- a/cli/daemon/dash/dashapp/src/components/Input.tsx +++ /dev/null @@ -1,318 +0,0 @@ -import React, { FunctionComponent, useRef, useState, useEffect } from 'react' - -type Type = "text" | "number" | "email" | "password"; - -export interface InputProps { - id: string; - value: string; - type?: Type; - onChange?: (value: string) => void; - - required?: boolean; - label?: string; - desc?: string; - htmlDesc?: string; - placeholder?: string; - error?: string; - prefix?: string; - cls?: string; - disabled?: boolean; -} - - -const Input: FunctionComponent = (props: InputProps) => { - const typ = props.type || "text" - const onChange = (event: React.ChangeEvent) => { - if (props.onChange) { - props.onChange(event.target.value) - } - } - - const extraCls = props.disabled ? "bg-gray-100 text-gray-600" : "" - - return ( -
- {props.label && - - } - - {props.error ? ( - <> - {props.prefix ? ( -
- - {props.prefix} - - -
- - - -
-
- ) : ( -
- -
- - - -
-
- )} -

{props.error}

- - ) : ( - <> - {props.prefix ? ( -
- - {props.prefix} - - -
- ) : ( -
- -
- )} - - {props.desc ? ( -

{props.desc}

- ) : props.htmlDesc ? ( -

- ) : null} - - )} -

- ) -} - -export interface TextAreaProps { - id: string; - value: string; - onChange?: (value: string) => void; - - rows?: number; - required?: boolean; - label?: string; - desc?: string; - htmlDesc?: string; - placeholder?: string; - error?: string; - cls?: string; - disabled?: boolean; -} - - -export const TextArea: FunctionComponent = (props) => { - const onChange = (event: React.ChangeEvent) => { - if (props.onChange) { - props.onChange(event.target.value) - } - } - - const extraCls = props.disabled ? "bg-gray-100 text-gray-600" : "" - - return ( -
- {props.label && - - } - - {props.error ? ( - <> -
-