diff --git a/.github/workflows/beacon_tests.yml b/.github/workflows/beacon_tests.yml new file mode 100644 index 000000000..bf2f8fae8 --- /dev/null +++ b/.github/workflows/beacon_tests.yml @@ -0,0 +1,46 @@ +name: Beacon Tests +defaults: + run: + shell: bash + working-directory: ./beacon +on: + pull_request: + paths: + - "beacon/**" + + push: + branches: + - main + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + MIX_ENV: test + +jobs: + tests: + name: Tests & Lint + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Setup elixir + id: beam + uses: erlef/setup-beam@v1 + with: + otp-version: 27.x # Define the OTP version [required] + elixir-version: 1.18.x # Define the elixir version [required] + - name: Install dependencies + run: mix deps.get + - name: Start epmd + run: epmd -daemon + - name: Run tests + run: MIX_ENV=test mix test + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Check for warnings + run: mix compile --force --warnings-as-errors + - name: Run format check + run: mix format --check-formatted diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 000000000..c3bac6326 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,20 @@ +name: Docker Build + +on: + pull_request: + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + run: docker build . 
diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 000000000..3981300df --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,33 @@ +name: Integration Tests +on: + pull_request: + paths: + - "lib/**" + - "test/**" + - "config/**" + - "priv/**" + - "assets/**" + - "rel/**" + - "mix.exs" + - "Dockerfile" + - "run.sh" + - "docker-compose.tests.yml" + + push: + branches: + - main + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + tests: + name: Tests + runs-on: blacksmith-8vcpu-ubuntu-2404 + + steps: + - uses: actions/checkout@v2 + - name: Run integration test + run: docker compose -f docker-compose.tests.yml up --abort-on-container-exit --exit-code-from test-runner + diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000..ba9b5cf30 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,80 @@ +name: Lint +on: + pull_request: + paths: + - "lib/**" + - "test/**" + - "config/**" + - "priv/**" + - "assets/**" + - "rel/**" + - "mix.exs" + - "Dockerfile" + - "run.sh" + + push: + branches: + - main + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + tests: + name: Lint + runs-on: blacksmith-4vcpu-ubuntu-2404 + + steps: + - uses: actions/checkout@v2 + - name: Setup elixir + id: beam + uses: erlef/setup-beam@v1 + with: + otp-version: 27.x # Define the OTP version [required] + elixir-version: 1.18.x # Define the elixir version [required] + - name: Cache Mix + uses: actions/cache@v4 + with: + path: | + deps + _build + key: ${{ github.workflow }}-${{ runner.os }}-mix-${{ env.elixir }}-${{ env.otp }}-${{ hashFiles('**/mix.lock') }} + restore-keys: | + ${{ github.workflow }}-${{ runner.os }}-mix-${{ env.elixir }}-${{ env.otp }}- + + - name: Install dependencies + run: 
mix deps.get + - name: Set up Postgres + run: docker compose -f docker-compose.dbs.yml up -d + - name: Run main database migrations + run: mix ecto.migrate --log-migrator-sql + - name: Run database tenant migrations + run: mix ecto.migrate --migrations-path lib/realtime/tenants/repo/migrations + - name: Check for warnings + run: mix compile --force --warnings-as-errors + - name: Run format check + run: mix format --check-formatted + - name: Credo checks + run: mix credo + - name: Run hex audit + run: mix hex.audit + - name: Run mix_audit + run: mix deps.audit + - name: Run sobelow + run: mix sobelow --config .sobelow-conf + - name: Retrieve PLT Cache + uses: actions/cache@v4 + id: plt-cache + with: + path: priv/plts + key: ${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-plts-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} + - name: Create PLTs + if: steps.plt-cache.outputs.cache-hit != 'true' + run: | + mkdir -p priv/plts + mix dialyzer.build + - name: Run dialyzer + run: mix dialyzer + - name: Run dev seeds + run: DB_ENC_KEY="1234567890123456" mix ecto.setup diff --git a/.github/workflows/manual_prod_build.yml b/.github/workflows/manual_prod_build.yml index f5014dd24..a57a46f55 100644 --- a/.github/workflows/manual_prod_build.yml +++ b/.github/workflows/manual_prod_build.yml @@ -10,7 +10,7 @@ on: required: true jobs: docker_x86_release: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 timeout-minutes: 120 env: arch: amd64 @@ -25,7 +25,8 @@ jobs: tags: | type=raw,value=v${{ github.event.inputs.docker_tag }}_${{ env.arch }} - - uses: docker/setup-buildx-action@v2 + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - uses: docker/login-action@v2 with: @@ -33,13 +34,11 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - id: build - uses: docker/build-push-action@v3 + uses: useblacksmith/build-push-action@v2 with: push: true tags: ${{ steps.meta.outputs.tags 
}} platforms: linux/${{ env.arch }} - cache-from: type=gha - cache-to: type=gha,mode=max docker_arm_release: runs-on: arm-runner @@ -64,15 +63,11 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - uses: docker/setup-buildx-action@v2 - with: - driver: docker - driver-opts: | - image=moby/buildkit:master - network=host + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - id: build - uses: docker/build-push-action@v3 + uses: useblacksmith/build-push-action@v2 with: context: . push: true @@ -82,13 +77,14 @@ jobs: merge_manifest: needs: [docker_x86_release, docker_arm_release] - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 permissions: contents: read packages: write id-token: write steps: - - uses: docker/setup-buildx-action@v2 + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - uses: docker/login-action@v2 with: diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml index 8fc83fe45..6149f28d7 100644 --- a/.github/workflows/mirror.yml +++ b/.github/workflows/mirror.yml @@ -10,7 +10,7 @@ on: jobs: mirror: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 permissions: contents: read packages: write diff --git a/.github/workflows/prod_build.yml b/.github/workflows/prod_build.yml index 9926c1c03..22c1b2899 100644 --- a/.github/workflows/prod_build.yml +++ b/.github/workflows/prod_build.yml @@ -15,7 +15,7 @@ on: jobs: release: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 outputs: published: ${{ steps.semantic.outputs.new_release_published }} version: ${{ steps.semantic.outputs.new_release_version }} @@ -30,7 +30,7 @@ jobs: docker_x86_release: needs: release - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 if: needs.release.outputs.published == 'true' timeout-minutes: 120 env: @@ -47,7 +47,8 @@ jobs: type=raw,value=v${{ needs.release.outputs.version }}_${{ env.arch }} 
type=raw,value=latest_${{ env.arch }} - - uses: docker/setup-buildx-action@v2 + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - uses: docker/login-action@v2 with: @@ -55,13 +56,11 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - id: build - uses: docker/build-push-action@v3 + uses: useblacksmith/build-push-action@v2 with: push: true tags: ${{ steps.meta.outputs.tags }} platforms: linux/${{ env.arch }} - cache-from: type=gha - cache-to: type=gha,mode=max docker_arm_release: needs: release @@ -89,15 +88,11 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - uses: docker/setup-buildx-action@v2 - with: - driver: docker - driver-opts: | - image=moby/buildkit:master - network=host + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - id: build - uses: docker/build-push-action@v3 + uses: useblacksmith/build-push-action@v2 with: context: . push: true @@ -107,13 +102,14 @@ jobs: merge_manifest: needs: [release, docker_x86_release, docker_arm_release] - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 permissions: contents: read packages: write id-token: write steps: - - uses: docker/setup-buildx-action@v2 + - name: Setup Blacksmith Builder + uses: useblacksmith/setup-docker-builder@v1 - uses: docker/login-action@v2 with: @@ -160,7 +156,7 @@ jobs: update-branch-name: needs: [release, docker_x86_release, docker_arm_release, merge_manifest] - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - name: Checkout branch uses: actions/checkout@v2 diff --git a/.github/workflows/prod_linter.yml b/.github/workflows/prod_linter.yml index 6af6b5ed8..243493034 100644 --- a/.github/workflows/prod_linter.yml +++ b/.github/workflows/prod_linter.yml @@ -7,7 +7,7 @@ on: jobs: format: name: Formatting Checks - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v2 @@ -15,8 +15,8 @@ jobs: id: beam uses: 
erlef/setup-beam@v1 with: - otp-version: 26.x # Define the OTP version [required] - elixir-version: 1.16.x # Define the elixir version [required] + otp-version: 27.x # Define the OTP version [required] + elixir-version: 1.18.x # Define the elixir version [required] - name: Cache Mix uses: actions/cache@v4 with: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5d3818814..a0f982760 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -16,10 +16,17 @@ on: branches: - main +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + MIX_ENV: test + jobs: tests: name: Tests - runs-on: ubuntu-latest + runs-on: blacksmith-8vcpu-ubuntu-2404 steps: - uses: actions/checkout@v2 @@ -28,48 +35,23 @@ jobs: uses: erlef/setup-beam@v1 with: otp-version: 27.x # Define the OTP version [required] - elixir-version: 1.17.x # Define the elixir version [required] + elixir-version: 1.18.x # Define the elixir version [required] - name: Cache Mix uses: actions/cache@v4 with: - path: deps - key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} + path: | + deps + _build + key: ${{ github.workflow }}-${{ runner.os }}-mix-${{ env.elixir }}-${{ env.otp }}-${{ hashFiles('**/mix.lock') }} restore-keys: | - ${{ runner.os }}-mix- + ${{ github.workflow }}-${{ runner.os }}-mix-${{ env.elixir }}-${{ env.otp }}- + - name: Pull postgres image quietly in background (used by test/support/containers.ex) + run: docker pull supabase/postgres:15.8.1.040 > /dev/null 2>&1 & - name: Install dependencies run: mix deps.get - name: Set up Postgres run: docker compose -f docker-compose.dbs.yml up -d - - name: Run main database migrations - run: mix ecto.migrate --log-migrator-sql - - name: Run database tenant migrations - run: mix ecto.migrate --migrations-path lib/realtime/tenants/repo/migrations - - name: Run format check - run: mix format 
--check-formatted - - name: Credo checks - run: mix credo - - name: Run hex audit - run: mix hex.audit - - name: Run mix_audit - run: mix deps.audit - - name: Run sobelow - run: mix sobelow --config .sobelow-conf - - name: Retrieve PLT Cache - uses: actions/cache@v4 - id: plt-cache - with: - path: priv/plts - key: ${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-plts-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} - - name: Create PLTs - if: steps.plt-cache.outputs.cache-hit != 'true' - run: | - mkdir -p priv/plts - mix dialyzer.build - - name: Run dialyzer - run: mix dialyzer - - name: Run dev seeds - run: DB_ENC_KEY="1234567890123456" mix ecto.setup - name: Start epmd run: epmd -daemon - name: Run tests diff --git a/.github/workflows/version_updated.yml b/.github/workflows/version_updated.yml index 6125f1ff7..ba6d340ad 100644 --- a/.github/workflows/version_updated.yml +++ b/.github/workflows/version_updated.yml @@ -20,7 +20,7 @@ name: Default Checks jobs: versions_updated: name: Versions Updated - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - name: Checkout code uses: actions/checkout@v3 diff --git a/.tool-versions b/.tool-versions index 35b41200e..70a472465 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,3 +1,3 @@ -elixir 1.17.3 -nodejs 18.13.0 -erlang 27.1 +elixir 1.18.4-otp-27 +nodejs 24 +erlang 27 diff --git a/Dockerfile b/Dockerfile index 33da5983f..6eb90206b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ -ARG ELIXIR_VERSION=1.17.3 -ARG OTP_VERSION=27.1.2 -ARG DEBIAN_VERSION=bookworm-20241111-slim +ARG ELIXIR_VERSION=1.18 +ARG OTP_VERSION=27.3 +ARG DEBIAN_VERSION=bookworm-20250929-slim ARG BUILDER_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}" ARG RUNNER_IMAGE="debian:${DEBIAN_VERSION}" @@ -19,7 +19,7 @@ RUN set -uex; \ mkdir -p /etc/apt/keyrings; \ curl -fsSL 
https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \ | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg; \ - NODE_MAJOR=18; \ + NODE_MAJOR=24; \ echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" \ > /etc/apt/sources.list.d/nodesource.list; \ apt-get -qy update; \ @@ -34,6 +34,7 @@ RUN mix local.hex --force && \ # install mix dependencies COPY mix.exs mix.lock ./ +COPY beacon beacon RUN mix deps.get --only $MIX_ENV RUN mkdir config diff --git a/Makefile b/Makefile index fd7f0f7fd..1259a1335 100644 --- a/Makefile +++ b/Makefile @@ -9,10 +9,10 @@ PORT ?= 4000 # Common commands dev: ## Start a dev server - ELIXIR_ERL_OPTIONS="+hmax 1000000000" SLOT_NAME_SUFFIX=some_sha PORT=$(PORT) MIX_ENV=dev SECURE_CHANNELS=true API_JWT_SECRET=dev METRICS_JWT_SECRET=dev REGION=fra DB_ENC_KEY="1234567890123456" CLUSTER_STRATEGIES=$(CLUSTER_STRATEGIES) ERL_AFLAGS="-kernel shell_history enabled" GEN_RPC_TCP_SERVER_PORT=5369 GEN_RPC_TCP_CLIENT_PORT=5469 iex --name $(NODE_NAME)@127.0.0.1 --cookie cookie -S mix phx.server + ELIXIR_ERL_OPTIONS="+hmax 1000000000" SLOT_NAME_SUFFIX=some_sha PORT=$(PORT) MIX_ENV=dev SECURE_CHANNELS=true API_JWT_SECRET=dev METRICS_JWT_SECRET=dev REGION=us-east-1 DB_ENC_KEY="1234567890123456" CLUSTER_STRATEGIES=$(CLUSTER_STRATEGIES) ERL_AFLAGS="-kernel shell_history enabled" GEN_RPC_TCP_SERVER_PORT=5369 GEN_RPC_TCP_CLIENT_PORT=5469 iex --name $(NODE_NAME)@127.0.0.1 --cookie cookie -S mix phx.server dev.orange: ## Start another dev server (orange) on port 4001 - ELIXIR_ERL_OPTIONS="+hmax 1000000000" SLOT_NAME_SUFFIX=some_sha PORT=4001 MIX_ENV=dev SECURE_CHANNELS=true API_JWT_SECRET=dev METRICS_JWT_SECRET=dev DB_ENC_KEY="1234567890123456" CLUSTER_STRATEGIES=$(CLUSTER_STRATEGIES) ERL_AFLAGS="-kernel shell_history enabled" GEN_RPC_TCP_SERVER_PORT=5469 GEN_RPC_TCP_CLIENT_PORT=5369 iex --name orange@127.0.0.1 --cookie cookie -S mix phx.server + ELIXIR_ERL_OPTIONS="+hmax 1000000000" 
SLOT_NAME_SUFFIX=some_sha PORT=4001 MIX_ENV=dev SECURE_CHANNELS=true API_JWT_SECRET=dev METRICS_JWT_SECRET=dev REGION=eu-west-1 DB_ENC_KEY="1234567890123456" CLUSTER_STRATEGIES=$(CLUSTER_STRATEGIES) ERL_AFLAGS="-kernel shell_history enabled" GEN_RPC_TCP_SERVER_PORT=5469 GEN_RPC_TCP_CLIENT_PORT=5369 iex --name orange@127.0.0.1 --cookie cookie -S mix phx.server seed: ## Seed the database DB_ENC_KEY="1234567890123456" FLY_ALLOC_ID=123e4567-e89b-12d3-a456-426614174000 mix run priv/repo/dev_seeds.exs diff --git a/README.md b/README.md index 2235bf388..633c19cbd 100644 --- a/README.md +++ b/README.md @@ -24,14 +24,14 @@ ## Status -![GitHub License](https://img.shields.io/github/license/supabase/realtime) +[![GitHub License](https://img.shields.io/github/license/supabase/realtime)](https://github.com/supabase/realtime/blob/main/LICENSE) [![Coverage Status](https://coveralls.io/repos/github/supabase/realtime/badge.svg?branch=main)](https://coveralls.io/github/supabase/realtime?branch=main) | Features | v1 | v2 | Status | | ---------------- | --- | --- | ------ | | Postgres Changes | ✔ | ✔ | GA | -| Broadcast | | ✔ | Beta | -| Presence | | ✔ | Beta | +| Broadcast | | ✔ | GA | +| Presence | | ✔ | GA | This repository focuses on version 2 but you can still access the previous version's [code](https://github.com/supabase/realtime/tree/v1) and [Docker image](https://hub.docker.com/layers/supabase/realtime/v1.0.0/images/sha256-e2766e0e3b0d03f7e9aa1b238286245697d0892c2f6f192fd2995dca32a4446a). For the latest Docker images go to https://hub.docker.com/r/supabase/realtime. @@ -55,12 +55,14 @@ The server does not guarantee that every message will be delivered to your clien ## Quick start -You can check out the [Multiplayer demo](https://multiplayer.dev) that features Broadcast, Presence and Postgres Changes under the demo directory: https://github.com/supabase/realtime/tree/main/demo. 
+You can check out the [Supabase UI Library](https://supabase.com/ui) Realtime components and the [multiplayer.dev](https://multiplayer.dev) demo app source code [here](https://github.com/supabase/multiplayer.dev) ## Client libraries -- JavaScript: [@supabase/realtime-js](https://github.com/supabase/realtime-js) -- Dart: [@supabase/realtime-dart](https://github.com/supabase/realtime-dart) +- [JavaScript](https://github.com/supabase/supabase-js/tree/master/packages/core/realtime-js) +- [Flutter/Dart](https://github.com/supabase/supabase-flutter/tree/main/packages/realtime_client) +- [Python](https://github.com/supabase/supabase-py/tree/main/src/realtime) +- [Swift](https://github.com/supabase/supabase-swift/tree/main/Sources/Realtime) ## Server Setup @@ -94,7 +96,7 @@ You can add your own by making a `POST` request to the server. You must change b "region": "us-west-1", "poll_interval_ms": 100, "poll_max_record_bytes": 1048576, - "ssl_enforced": false + "ssl_enforced": false } } ] @@ -169,6 +171,7 @@ If you're using the default tenant, the URL is `ws://realtime-dev.localhost:4000 | CONNECT_PARTITION_SLOTS | number | Number of dynamic supervisor partitions used by the Connect, ReplicationConnect processes | | METRICS_CLEANER_SCHEDULE_TIMER_IN_MS | number | Time in ms to run the Metric Cleaner task | | METRICS_RPC_TIMEOUT_IN_MS | number | Time in ms to wait for RPC call to fetch Metric per node | +| WEBSOCKET_MAX_HEAP_SIZE | number | Max number of bytes to be allocated as heap for the WebSocket transport process. If the limit is reached the process is brutally killed. Defaults to 50MB. | | REQUEST_ID_BAGGAGE_KEY | string | OTEL Baggage key to be used as request id | | OTEL_SDK_DISABLED | boolean | Disable OpenTelemetry tracing completely when 'true' | | OTEL_TRACES_EXPORTER | string | Possible values: `otlp` or `none`. See [https://github.com/open-telemetry/opentelemetry-erlang/tree/v1.4.0/apps#os-environment] for more details on how to configure the traces exporter. 
| @@ -190,6 +193,9 @@ If you're using the default tenant, the URL is `ws://realtime-dev.localhost:4000 | MAX_GEN_RPC_CLIENTS | number | Max amount of `gen_rpc` TCP connections per node-to-node channel | | REBALANCE_CHECK_INTERVAL_IN_MS | number | Time in ms to check if process is in the right region | | DISCONNECT_SOCKET_ON_NO_CHANNELS_INTERVAL_IN_MS | number | Time in ms to check if a socket has no channels open and if so, disconnect it | +| BROADCAST_POOL_SIZE | number | Number of processes to relay Phoenix.PubSub messages across the cluster | +| POSTGRES_CDC_SCOPE_SHARDS | number | Number of dynamic supervisor partitions used by the Postgres CDC extension. Defaults to 5. | +| USERS_SCOPE_SHARDS | number | Number of dynamic supervisor partitions used by the Users extension. Defaults to 5. | The OpenTelemetry variables mentioned above are not an exhaustive list of all [supported environment variables](https://opentelemetry.io/docs/languages/sdk-configuration/). @@ -240,6 +246,7 @@ This is the list of operational codes that can help you understand your deployme | ChannelRateLimitReached | The number of channels you can create has reached its limit | | ConnectionRateLimitReached | The number of connected clients as reached its limit | | ClientJoinRateLimitReached | The rate of joins per second from your clients has reached the channel limits | +| DatabaseConnectionRateLimitReached | The rate of attempts to connect to tenants database has reached the limit | | MessagePerSecondRateLimitReached | The rate of messages per second from your clients has reached the channel limits | | RealtimeDisabledForTenant | Realtime has been disabled for the tenant | | UnableToConnectToTenantDatabase | Realtime was not able to connect to the tenant's database | @@ -284,6 +291,7 @@ This is the list of operational codes that can help you understand your deployme | UnknownErrorOnController | An error we are not handling correctly was triggered on a controller | | UnknownErrorOnChannel | 
An error we are not handling correctly was triggered on a channel | | PresenceRateLimitReached | Limit of presence events reached | +| UnableToReplayMessages | An error while replaying messages | ## License diff --git a/assets/js/app.js b/assets/js/app.js index 9b19c27f5..858de8831 100644 --- a/assets/js/app.js +++ b/assets/js/app.js @@ -8,7 +8,7 @@ import { createClient } from "@supabase/supabase-js"; // LiveView is managing this page because we have Phoenix running // We're using LiveView to handle the Realtime client via LiveView Hooks -let Hooks = {}; +const Hooks = {}; Hooks.payload = { initRealtime( channelName, @@ -24,8 +24,6 @@ Hooks.payload = { private_channel ) { // Instantiate our client with the Realtime server and params to connect with - { - } const opts = { realtime: { params: { @@ -36,17 +34,20 @@ Hooks.payload = { this.realtimeSocket = createClient(host, token, opts); - if (bearer != "") { + if (bearer !== "") { this.realtimeSocket.realtime.setAuth(bearer); } - private_channel = private_channel == "true"; + private_channel = private_channel === "true"; // Join the Channel 'any' // Channels can be named anything // All clients on the same Channel will get messages sent to that Channel this.channel = this.realtimeSocket.channel(channelName, { - config: { broadcast: { self: true, private: private_channel } }, + config: { + broadcast: { self: true }, + private: private_channel, + }, }); // Hack to confirm Postgres is subscribed @@ -55,13 +56,13 @@ Hooks.payload = { if (payload.extension === "postgres_changes" && payload.status === "ok") { this.pushEventTo("#conn_info", "postgres_subscribed", {}); } - let ts = new Date(); - let line = ` + const ts = new Date(); + const line = ` SYSTEM ${ts.toISOString()} ${JSON.stringify(payload)} `; - let list = document.querySelector("#plist"); + const list = document.querySelector("#plist"); list.innerHTML = line + list.innerHTML; }); @@ -69,13 +70,13 @@ Hooks.payload = { // The event name can by anything // Match on 
specific event names to filter for only those types of events and do something with them this.channel.on("broadcast", { event: "*" }, (payload) => { - let ts = new Date(); - let line = ` + const ts = new Date(); + const line = ` BROADCAST ${ts.toISOString()} ${JSON.stringify(payload)} `; - let list = document.querySelector("#plist"); + const list = document.querySelector("#plist"); list.innerHTML = line + list.innerHTML; }); @@ -85,29 +86,33 @@ Hooks.payload = { this.channel.on("presence", { event: "*" }, (payload) => { this.pushEventTo("#conn_info", "presence_subscribed", {}); - let ts = new Date(); - let line = ` + const ts = new Date(); + const line = ` PRESENCE ${ts.toISOString()} ${JSON.stringify(payload)} `; - let list = document.querySelector("#plist"); + const list = document.querySelector("#plist"); list.innerHTML = line + list.innerHTML; }); } // Listen for all (`*`) `postgres_changes` events on tables in the `public` schema if (enable_db_changes === "true") { - let postgres_changes_opts = { event: "*", schema: schema, table: table }; + const postgres_changes_opts = { + event: "*", + schema: schema, + table: table, + }; if (filter !== "") { postgres_changes_opts.filter = filter; } this.channel.on("postgres_changes", postgres_changes_opts, (payload) => { - let ts = performance.now() + performance.timeOrigin; - let iso_ts = new Date(); - let payload_ts = Date.parse(payload.commit_timestamp); - let latency = ts - payload_ts; - let line = ` + const ts = performance.now() + performance.timeOrigin; + const iso_ts = new Date(); + const payload_ts = Date.parse(payload.commit_timestamp); + const latency = ts - payload_ts; + const line = ` POSTGRES ${iso_ts.toISOString()} @@ -117,7 +122,7 @@ Hooks.payload = { )} ms `; - let list = document.querySelector("#plist"); + const list = document.querySelector("#plist"); list.innerHTML = line + list.innerHTML; }); } @@ -178,10 +183,9 @@ Hooks.payload = { // } if (enable_presence === "true") { const name = "user_name_" + 
Math.floor(Math.random() * 100); - this.channel.send({ - type: "presence", - event: "TRACK", - payload: { name: name, t: performance.now() }, + await this.channel.track({ + name: name, + t: performance.now(), }); } } else { @@ -214,7 +218,7 @@ Hooks.payload = { }, mounted() { - let params = { + const params = { log_level: localStorage.getItem("log_level"), token: localStorage.getItem("token"), host: localStorage.getItem("host"), @@ -250,9 +254,9 @@ Hooks.payload = { this.sendRealtime(message.event, message.payload) ); - this.handleEvent("disconnect", ({}) => this.disconnectRealtime()); + this.handleEvent("disconnect", () => this.disconnectRealtime()); - this.handleEvent("clear_local_storage", ({}) => this.clearLocalStorage()); + this.handleEvent("clear_local_storage", () => this.clearLocalStorage()); }, }; @@ -266,18 +270,18 @@ Hooks.latency = { }, }; -let csrfToken = document +const csrfToken = document .querySelector("meta[name='csrf-token']") .getAttribute("content"); -let liveSocket = new LiveSocket("/live", Socket, { +const liveSocket = new LiveSocket("/live", Socket, { hooks: Hooks, params: { _csrf_token: csrfToken }, }); topbar.config({ barColors: { 0: "#29d" }, shadowColor: "rgba(0, 0, 0, .3)" }); -window.addEventListener("phx:page-loading-start", (info) => topbar.show()); -window.addEventListener("phx:page-loading-stop", (info) => topbar.hide()); +window.addEventListener("phx:page-loading-start", () => topbar.show()); +window.addEventListener("phx:page-loading-stop", () => topbar.hide()); liveSocket.connect(); diff --git a/assets/package.json b/assets/package.json index b718d4593..bf079a32c 100644 --- a/assets/package.json +++ b/assets/package.json @@ -1,5 +1,5 @@ { "dependencies": { - "@supabase/supabase-js": "^2.50.0" + "@supabase/supabase-js": "^2.85.0" } } \ No newline at end of file diff --git a/beacon/.formatter.exs b/beacon/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/beacon/.formatter.exs @@ -0,0 +1,4 @@ +# 
Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/beacon/.gitignore b/beacon/.gitignore new file mode 100644 index 000000000..65fb2f5eb --- /dev/null +++ b/beacon/.gitignore @@ -0,0 +1,23 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +beacon-*.tar + +# Temporary files, for example, from tests. +/tmp/ diff --git a/beacon/README.md b/beacon/README.md new file mode 100644 index 000000000..b89093b1e --- /dev/null +++ b/beacon/README.md @@ -0,0 +1,60 @@ +# Beacon + +Beacon is a scalable process group manager. The main use case for this library is to have membership counts available on the cluster without spamming whenever a process joins or leaves a group. A node can have thousands of processes joining and leaving hundreds of groups while sending just the membership count to other nodes. 
+ +The main features are: + +* Process pids are available only to the node where the processes reside; +* Groups are partitioned locally to allow greater concurrency while joining different groups; +* Group counts are periodically broadcasted (defaults to every 5 seconds) to update group membership numbers to all participating nodes; +* Sub-cluster nodes join by using the same scope; + +## Installation + +The package can be installed by adding `beacon` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:beacon, "~> 1.0"} + ] +end +``` + +## Using + +Add Beacon to your application's supervision tree specifying a scope name (here it's `:users`) + +```elixir +def start(_type, _args) do + children = + [ + {Beacon, :users}, + # Or passing options: + # {Beacon, [:users, opts]} + # See Beacon.start_link/2 for the options +``` + +Now process can join groups + +```elixir +iex> pid = self() +#PID<0.852.0> +iex> Beacon.join(:users, {:tenant, 123}, pid) +:ok +iex> Beacon.local_member_count(:users, {:tenant, 123}) +1 +iex> Beacon.local_members(:users, {:tenant, 123}) +[#PID<0.852.0>] +iex> Beacon.local_member?(:users, {:tenant, 123}, pid) +true +``` + +From another node part of the same scope: + +```elixir +iex> Beacon.member_counts(:users) +%{{:tenant, 123} => 1} +iex> Beacon.member_count(:users, {:tenant, 123}) +1 +``` diff --git a/beacon/config/config.exs b/beacon/config/config.exs new file mode 100644 index 000000000..e17c52707 --- /dev/null +++ b/beacon/config/config.exs @@ -0,0 +1,4 @@ +import Config + +# Print nothing during tests unless captured or a test failure happens +config :logger, backends: [], level: :debug diff --git a/beacon/lib/beacon.ex b/beacon/lib/beacon.ex new file mode 100644 index 000000000..ba8e7987c --- /dev/null +++ b/beacon/lib/beacon.ex @@ -0,0 +1,153 @@ +defmodule Beacon do + @moduledoc """ + Distributed process group membership tracking. 
+ """ + + alias Beacon.Partition + alias Beacon.Scope + + @type group :: any + @type start_option :: + {:partitions, pos_integer()} | {:broadcast_interval_in_ms, non_neg_integer()} + + @doc "Returns a supervisor child specification for a Beacon scope" + def child_spec([scope]) when is_atom(scope), do: child_spec([scope, []]) + def child_spec(scope) when is_atom(scope), do: child_spec([scope, []]) + + def child_spec([scope, opts]) when is_atom(scope) and is_list(opts) do + %{ + id: Beacon, + start: {__MODULE__, :start_link, [scope, opts]}, + type: :supervisor + } + end + + @doc """ + Starts the Beacon supervision tree for `scope`. + + Options: + + * `:partitions` - number of partitions to use (default: number of schedulers online) + * `:broadcast_interval_in_ms`: - interval in milliseconds to broadcast membership counts to other nodes (default: 5000 ms) + * `:message_module` - module implementing `Beacon.Adapter` behaviour (default: `Beacon.Adapter.ErlDist`) + """ + @spec start_link(atom, [start_option]) :: Supervisor.on_start() + def start_link(scope, opts \\ []) when is_atom(scope) do + {partitions, opts} = Keyword.pop(opts, :partitions, System.schedulers_online()) + broadcast_interval_in_ms = Keyword.get(opts, :broadcast_interval_in_ms) + + if not (is_integer(partitions) and partitions >= 1) do + raise ArgumentError, + "expected :partitions to be a positive integer, got: #{inspect(partitions)}" + end + + if broadcast_interval_in_ms != nil and + not (is_integer(broadcast_interval_in_ms) and broadcast_interval_in_ms > 0) do + raise ArgumentError, + "expected :broadcast_interval_in_ms to be a positive integer, got: #{inspect(broadcast_interval_in_ms)}" + end + + Beacon.Supervisor.start_link(scope, partitions, opts) + end + + @doc "Join pid to group in scope" + @spec join(atom, any, pid) :: :ok | {:error, :not_local} + def join(_scope, _group, pid) when is_pid(pid) and node(pid) != node(), do: {:error, :not_local} + + def join(scope, group, pid) when is_atom(scope) 
and is_pid(pid) do + Partition.join(Beacon.Supervisor.partition(scope, group), group, pid) + end + + @doc "Leave pid from group in scope" + @spec leave(atom, group, pid) :: :ok + def leave(scope, group, pid) when is_atom(scope) and is_pid(pid) do + Partition.leave(Beacon.Supervisor.partition(scope, group), group, pid) + end + + @doc "Get total members count per group in scope" + @spec member_counts(atom) :: %{group => non_neg_integer} + def member_counts(scope) when is_atom(scope) do + remote_counts = Scope.member_counts(scope) + + scope + |> local_member_counts() + |> Map.merge(remote_counts, fn _k, v1, v2 -> v1 + v2 end) + end + + @doc "Get total member count of group in scope" + @spec member_count(atom, group) :: non_neg_integer + def member_count(scope, group) do + local_member_count(scope, group) + Scope.member_count(scope, group) + end + + @doc "Get total member count of group in scope on specific node" + @spec member_count(atom, group, node) :: non_neg_integer + def member_count(scope, group, node) when node == node(), do: local_member_count(scope, group) + def member_count(scope, group, node), do: Scope.member_count(scope, group, node) + + @doc "Get local members of group in scope" + @spec local_members(atom, group) :: [pid] + def local_members(scope, group) when is_atom(scope) do + Partition.members(Beacon.Supervisor.partition(scope, group), group) + end + + @doc "Get local member count of group in scope" + @spec local_member_count(atom, group) :: non_neg_integer + def local_member_count(scope, group) when is_atom(scope) do + Partition.member_count(Beacon.Supervisor.partition(scope, group), group) + end + + @doc "Get local members count per group in scope" + @spec local_member_counts(atom) :: %{group => non_neg_integer} + def local_member_counts(scope) when is_atom(scope) do + Enum.reduce(Beacon.Supervisor.partitions(scope), %{}, fn partition_name, acc -> + Map.merge(acc, Partition.member_counts(partition_name)) + end) + end + + @doc "Check if pid is a 
local member of group in scope" + @spec local_member?(atom, group, pid) :: boolean + def local_member?(scope, group, pid) when is_atom(scope) and is_pid(pid) do + Partition.member?(Beacon.Supervisor.partition(scope, group), group, pid) + end + + @doc "Get all local groups in scope" + @spec local_groups(atom) :: [group] + def local_groups(scope) when is_atom(scope) do + Enum.flat_map(Beacon.Supervisor.partitions(scope), fn partition_name -> + Partition.groups(partition_name) + end) + end + + @doc "Get local group count in scope" + @spec local_group_count(atom) :: non_neg_integer + def local_group_count(scope) when is_atom(scope) do + Enum.sum_by(Beacon.Supervisor.partitions(scope), fn partition_name -> + Partition.group_count(partition_name) + end) + end + + @doc "Get groups in scope" + @spec groups(atom) :: [group] + def groups(scope) when is_atom(scope) do + remote_groups = Scope.groups(scope) + + scope + |> local_groups() + |> MapSet.new() + |> MapSet.union(remote_groups) + |> MapSet.to_list() + end + + @doc "Get group count in scope" + @spec group_count(atom) :: non_neg_integer + def group_count(scope) when is_atom(scope) do + remote_groups = Scope.groups(scope) + + scope + |> local_groups() + |> MapSet.new() + |> MapSet.union(remote_groups) + |> MapSet.size() + end +end diff --git a/beacon/lib/beacon/adapter.ex b/beacon/lib/beacon/adapter.ex new file mode 100644 index 000000000..cc3fb6abf --- /dev/null +++ b/beacon/lib/beacon/adapter.ex @@ -0,0 +1,17 @@ +defmodule Beacon.Adapter do + @moduledoc """ + Behaviour module for Beacon messaging adapters. 
+ """ + + @doc "Register the current process to receive messages for the given scope" + @callback register(scope :: atom) :: :ok + + @doc "Broadcast a message to all nodes in the given scope" + @callback broadcast(scope :: atom, message :: term) :: any + + @doc "Broadcast a message to specific nodes in the given scope" + @callback broadcast(scope :: atom, [node], message :: term) :: any + + @doc "Send a message to a specific node in the given scope" + @callback send(scope :: atom, node, message :: term) :: any +end diff --git a/beacon/lib/beacon/adapter/erl_dist.ex b/beacon/lib/beacon/adapter/erl_dist.ex new file mode 100644 index 000000000..4f3c2b55a --- /dev/null +++ b/beacon/lib/beacon/adapter/erl_dist.ex @@ -0,0 +1,30 @@ +defmodule Beacon.Adapter.ErlDist do + @moduledoc false + + import Kernel, except: [send: 2] + + @behaviour Beacon.Adapter + + @impl true + def register(scope) do + Process.register(self(), Beacon.Supervisor.name(scope)) + :ok + end + + @impl true + def broadcast(scope, message) do + name = Beacon.Supervisor.name(scope) + Enum.each(Node.list(), fn node -> :erlang.send({name, node}, message, [:noconnect]) end) + end + + @impl true + def broadcast(scope, nodes, message) do + name = Beacon.Supervisor.name(scope) + Enum.each(nodes, fn node -> :erlang.send({name, node}, message, [:noconnect]) end) + end + + @impl true + def send(scope, node, message) do + :erlang.send({Beacon.Supervisor.name(scope), node}, message, [:noconnect]) + end +end diff --git a/beacon/lib/beacon/partition.ex b/beacon/lib/beacon/partition.ex new file mode 100644 index 000000000..e494562bc --- /dev/null +++ b/beacon/lib/beacon/partition.ex @@ -0,0 +1,147 @@ +defmodule Beacon.Partition do + @moduledoc false + + use GenServer + require Logger + + defmodule State do + @moduledoc false + @type t :: %__MODULE__{ + name: atom, + scope: atom, + entries_table: atom, + monitors: %{{Beacon.group(), pid} => reference} + } + defstruct [:name, :scope, :entries_table, monitors: %{}] + end + 
+ @spec join(atom, Beacon.group(), pid) :: :ok + def join(partition_name, group, pid), do: GenServer.call(partition_name, {:join, group, pid}) + + @spec leave(atom, Beacon.group(), pid) :: :ok + def leave(partition_name, group, pid), do: GenServer.call(partition_name, {:leave, group, pid}) + + @spec members(atom, Beacon.group()) :: [pid] + def members(partition_name, group) do + partition_name + |> Beacon.Supervisor.partition_entries_table() + |> :ets.select([{{{group, :"$1"}}, [], [:"$1"]}]) + end + + @spec member_count(atom, Beacon.group()) :: non_neg_integer + def member_count(partition_name, group), do: :ets.lookup_element(partition_name, group, 2, 0) + + @spec member_counts(atom) :: %{Beacon.group() => non_neg_integer} + def member_counts(partition_name) do + partition_name + |> :ets.tab2list() + |> Map.new() + end + + @spec member?(atom, Beacon.group(), pid) :: boolean + def member?(partition_name, group, pid) do + partition_name + |> Beacon.Supervisor.partition_entries_table() + |> :ets.lookup({group, pid}) + |> case do + [{{^group, ^pid}}] -> true + [] -> false + end + end + + @spec groups(atom) :: [Beacon.group()] + def groups(partition_name), do: :ets.select(partition_name, [{{:"$1", :_}, [], [:"$1"]}]) + + @spec group_count(atom) :: non_neg_integer + def group_count(partition_name), do: :ets.info(partition_name, :size) + + @spec start_link(atom, atom, atom) :: GenServer.on_start() + def start_link(scope, partition_name, partition_entries_table), + do: + GenServer.start_link(__MODULE__, [scope, partition_name, partition_entries_table], + name: partition_name + ) + + @impl true + @spec init(any) :: {:ok, State.t()} + def init([scope, name, entries_table]) do + {:ok, %State{scope: scope, name: name, entries_table: entries_table}, + {:continue, :rebuild_monitors_and_counters}} + end + + @impl true + @spec handle_continue(:rebuild_monitors_and_counters, State.t()) :: {:noreply, State.t()} + def handle_continue(:rebuild_monitors_and_counters, state) do + # 
Here we delete all counters and rebuild them based on entries table + :ets.delete_all_objects(state.name) + + monitors = + :ets.tab2list(state.entries_table) + |> Enum.reduce(%{}, fn {{group, pid}}, monitors_acc -> + ref = Process.monitor(pid, tag: {:DOWN, group}) + :ets.update_counter(state.name, group, {2, 1}, {group, 0}) + Map.put(monitors_acc, {group, pid}, ref) + end) + + {:noreply, %{state | monitors: monitors}} + end + + @impl true + @spec handle_call({:join, Beacon.group(), pid}, GenServer.from(), State.t()) :: + {:reply, :ok, State.t()} + def handle_call({:join, group, pid}, _from, state) do + if :ets.insert_new(state.entries_table, {{group, pid}}) do + # Increment existing or create + :ets.update_counter(state.name, group, {2, 1}, {group, 0}) + ref = Process.monitor(pid, tag: {:DOWN, group}) + monitors = Map.put(state.monitors, {group, pid}, ref) + {:reply, :ok, %{state | monitors: monitors}} + else + {:reply, :ok, state} + end + end + + def handle_call({:leave, group, pid}, _from, state) do + state = remove(group, pid, state) + {:reply, :ok, state} + end + + @impl true + @spec handle_info({{:DOWN, Beacon.group()}, reference, :process, pid, term}, State.t()) :: + {:noreply, State.t()} + def handle_info({{:DOWN, group}, _ref, :process, pid, _reason}, state) do + state = remove(group, pid, state) + {:noreply, state} + end + + def handle_info(_, state), do: {:noreply, state} + + defp remove(group, pid, state) do + case :ets.lookup(state.entries_table, {group, pid}) do + [{{^group, ^pid}}] -> + :ets.delete(state.entries_table, {group, pid}) + + # Delete or decrement counter + case :ets.lookup_element(state.name, group, 2, 0) do + 1 -> :ets.delete(state.name, group) + count when count > 1 -> :ets.update_counter(state.name, group, {2, -1}) + end + + [] -> + Logger.warning( + "Beacon[#{node()}|#{state.scope}] Trying to remove an unknown process #{inspect(pid)}" + ) + + :ok + end + + case Map.pop(state.monitors, {group, pid}) do + {nil, _} -> + state + + {ref, 
new_monitors} -> + Process.demonitor(ref, [:flush]) + %{state | monitors: new_monitors} + end + end +end diff --git a/beacon/lib/beacon/scope.ex b/beacon/lib/beacon/scope.ex new file mode 100644 index 000000000..72a43ba1c --- /dev/null +++ b/beacon/lib/beacon/scope.ex @@ -0,0 +1,208 @@ +defmodule Beacon.Scope do + @moduledoc false + # Responsible to discover and keep track of all Beacon peers in the cluster + + use GenServer + require Logger + + @default_broadcast_interval 5_000 + + @spec member_counts(atom) :: %{Beacon.group() => non_neg_integer} + def member_counts(scope) do + scope + |> table_name() + |> :ets.select([{{:_, :"$1"}, [], [:"$1"]}]) + |> Enum.reduce(%{}, fn member_counts, acc -> + Map.merge(acc, member_counts, fn _k, v1, v2 -> v1 + v2 end) + end) + end + + @spec member_count(atom, Beacon.group()) :: non_neg_integer + def member_count(scope, group) do + scope + |> table_name() + |> :ets.select([{{:_, :"$1"}, [], [:"$1"]}]) + |> Enum.sum_by(fn member_counts -> Map.get(member_counts, group, 0) end) + end + + @spec member_count(atom, Beacon.group(), node) :: non_neg_integer + def member_count(scope, group, node) do + case :ets.lookup(table_name(scope), node) do + [{^node, member_counts}] -> Map.get(member_counts, group, 0) + [] -> 0 + end + end + + @spec groups(atom) :: MapSet.t(Beacon.group()) + def groups(scope) do + scope + |> table_name() + |> :ets.select([{{:_, :"$1"}, [], [:"$1"]}]) + |> Enum.reduce(MapSet.new(), fn member_counts, acc -> + member_counts + |> Map.keys() + |> MapSet.new() + |> MapSet.union(acc) + end) + end + + @typep member_counts :: %{Beacon.group() => non_neg_integer} + + defp table_name(scope), do: :"#{scope}_beacon_peer_counts" + + defmodule State do + @moduledoc false + @type t :: %__MODULE__{ + scope: atom, + message_module: module, + broadcast_interval: non_neg_integer, + peer_counts_table: :ets.tid(), + peers: %{pid => reference} + } + defstruct [ + :scope, + :message_module, + :broadcast_interval, + :peer_counts_table, + 
peers: %{} + ] + end + + @spec start_link(atom, Keyword.t()) :: GenServer.on_start() + def start_link(scope, opts \\ []), do: GenServer.start_link(__MODULE__, [scope, opts]) + + @impl true + def init([scope, opts]) do + :ok = :net_kernel.monitor_nodes(true) + + peer_counts_table = + :ets.new(table_name(scope), [:set, :protected, :named_table, read_concurrency: true]) + + broadcast_interval = + Keyword.get(opts, :broadcast_interval_in_ms, @default_broadcast_interval) + + message_module = Keyword.get(opts, :message_module, Beacon.Adapter.ErlDist) + + Logger.info("Beacon[#{node()}|#{scope}] Starting") + + :ok = message_module.register(scope) + + {:ok, + %State{ + scope: scope, + message_module: message_module, + broadcast_interval: broadcast_interval, + peer_counts_table: peer_counts_table + }, {:continue, :discover}} + end + + @impl true + @spec handle_continue(:discover, State.t()) :: {:noreply, State.t()} + def handle_continue(:discover, state) do + state.message_module.broadcast(state.scope, {:discover, self()}) + Process.send_after(self(), :broadcast_counts, state.broadcast_interval) + {:noreply, state} + end + + @impl true + @spec handle_info( + {:discover, pid} + | {:sync, pid, member_counts} + | :broadcast_counts + | {:nodeup, node} + | {:nodedown, node} + | {:DOWN, reference, :process, pid, term}, + State.t() + ) :: {:noreply, State.t()} + # A remote peer is discovering us + def handle_info({:discover, peer}, state) do + Logger.info( + "Beacon[#{node()}|#{state.scope}] Received DISCOVER request from node #{node(peer)}" + ) + + state.message_module.send( + state.scope, + node(peer), + {:sync, self(), Beacon.local_member_counts(state.scope)} + ) + + # We don't do anything if we already know about this peer + if Map.has_key?(state.peers, peer) do + Logger.debug( + "Beacon[#{node()}|#{state.scope}] already know peer #{inspect(peer)} from node #{node(peer)}" + ) + + {:noreply, state} + else + Logger.debug( + "Beacon[#{node()}|#{state.scope}] discovered peer 
#{inspect(peer)} from node #{node(peer)}" + ) + + ref = Process.monitor(peer) + new_peers = Map.put(state.peers, peer, ref) + state.message_module.send(state.scope, node(peer), {:discover, self()}) + {:noreply, %State{state | peers: new_peers}} + end + end + + # A remote peer has sent us its local member counts + def handle_info({:sync, peer, member_counts}, state) do + :ets.insert(state.peer_counts_table, {node(peer), member_counts}) + {:noreply, state} + end + + # Periodic broadcast of our local member counts to all known peers + def handle_info(:broadcast_counts, state) do + nodes = + state.peers + |> Map.keys() + |> Enum.map(&node/1) + + state.message_module.broadcast( + state.scope, + nodes, + {:sync, self(), Beacon.local_member_counts(state.scope)} + ) + + Process.send_after(self(), :broadcast_counts, state.broadcast_interval) + {:noreply, state} + end + + # Do nothing if the node that came up is our own node + def handle_info({:nodeup, node}, state) when node == node(), do: {:noreply, state} + + # Send a discover message to the node that just connected + def handle_info({:nodeup, node}, state) do + :telemetry.execute([:beacon, state.scope, :node, :up], %{}, %{node: node}) + + Logger.info( + "Beacon[#{node()}|#{state.scope}] Node #{node} has joined the cluster, sending discover message" + ) + + state.message_module.send(state.scope, node, {:discover, self()}) + {:noreply, state} + end + + # Do nothing and wait for the DOWN message from monitor + def handle_info({:nodedown, _node}, state), do: {:noreply, state} + + # A remote peer has disconnected/crashed + # We forget about it and remove its member counts + def handle_info({:DOWN, ref, :process, peer, reason}, state) do + Logger.info( + "Beacon[#{node()}|#{state.scope}] Scope process is DOWN on node #{node(peer)}: #{inspect(reason)}" + ) + + case Map.pop(state.peers, peer) do + {nil, _} -> + {:noreply, state} + + {^ref, new_peers} -> + :ets.delete(state.peer_counts_table, node(peer)) + 
:telemetry.execute([:beacon, state.scope, :node, :down], %{}, %{node: node(peer)}) + {:noreply, %State{state | peers: new_peers}} + end + end + + def handle_info(_msg, state), do: {:noreply, state} +end diff --git a/beacon/lib/beacon/supervisor.ex b/beacon/lib/beacon/supervisor.ex new file mode 100644 index 000000000..fae322813 --- /dev/null +++ b/beacon/lib/beacon/supervisor.ex @@ -0,0 +1,61 @@ +defmodule Beacon.Supervisor do + @moduledoc false + use Supervisor + + def name(scope), do: :"#{scope}_beacon" + def supervisor_name(scope), do: :"#{scope}_beacon_supervisor" + def partition_name(scope, partition), do: :"#{scope}_beacon_partition_#{partition}" + def partition_entries_table(partition_name), do: :"#{partition_name}_entries" + + @spec partition(atom, Scope.group()) :: atom + def partition(scope, group) do + case :persistent_term.get(scope, :unknown) do + :unknown -> raise "Beacon for scope #{inspect(scope)} is not started" + partition_names -> elem(partition_names, :erlang.phash2(group, tuple_size(partition_names))) + end + end + + @spec partitions(atom) :: [atom] + def partitions(scope) do + case :persistent_term.get(scope, :unknown) do + :unknown -> raise "Beacon for scope #{inspect(scope)} is not started" + partition_names -> Tuple.to_list(partition_names) + end + end + + @spec start_link(atom, pos_integer(), Keyword.t()) :: Supervisor.on_start() + def start_link(scope, partitions, opts \\ []) do + args = [scope, partitions, opts] + Supervisor.start_link(__MODULE__, args, name: supervisor_name(scope)) + end + + @impl true + def init([scope, partitions, opts]) do + children = + for i <- 0..(partitions - 1) do + partition_name = partition_name(scope, i) + partition_entries_table = partition_entries_table(partition_name) + + ^partition_entries_table = + :ets.new(partition_entries_table, [:set, :public, :named_table, read_concurrency: true]) + + ^partition_name = + :ets.new(partition_name, [:set, :public, :named_table, read_concurrency: true]) + + %{ + id: i, 
+ start: {Beacon.Partition, :start_link, [scope, partition_name, partition_entries_table]} + } + end + + partition_names = for i <- 0..(partitions - 1), do: partition_name(scope, i) + + :persistent_term.put(scope, List.to_tuple(partition_names)) + + children = [ + %{id: :scope, start: {Beacon.Scope, :start_link, [scope, opts]}} | children + ] + + Supervisor.init(children, strategy: :one_for_one) + end +end diff --git a/beacon/mix.exs b/beacon/mix.exs new file mode 100644 index 000000000..4448f5f1e --- /dev/null +++ b/beacon/mix.exs @@ -0,0 +1,34 @@ +defmodule Beacon.MixProject do + use Mix.Project + + def project do + [ + app: :beacon, + version: "1.0.0", + elixir: "~> 1.18", + start_permanent: Mix.env() == :prod, + elixirc_paths: elixirc_paths(Mix.env()), + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Specifies which paths to compile per environment. + defp elixirc_paths(:test), do: ["lib", "test/support"] + defp elixirc_paths(_), do: ["lib"] + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + {:telemetry, "~> 1.3"}, + {:mix_test_watch, "~> 1.0", only: [:dev, :test], runtime: false} + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + ] + end +end diff --git a/beacon/mix.lock b/beacon/mix.lock new file mode 100644 index 000000000..2ba2a6c23 --- /dev/null +++ b/beacon/mix.lock @@ -0,0 +1,5 @@ +%{ + "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"}, + "mix_test_watch": {:hex, :mix_test_watch, "1.4.0", "d88bcc4fbe3198871266e9d2f00cd8ae350938efbb11d3fa1da091586345adbb", [:mix], [{:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "2b4693e17c8ead2ef56d4f48a0329891e8c2d0d73752c0f09272a2b17dc38d1b"}, + "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, +} diff --git a/beacon/test/beacon/partition_test.exs b/beacon/test/beacon/partition_test.exs new file mode 100644 index 000000000..0b105d771 --- /dev/null +++ b/beacon/test/beacon/partition_test.exs @@ -0,0 +1,185 @@ +defmodule Beacon.PartitionTest do + use ExUnit.Case, async: true + alias Beacon.Partition + + setup do + scope = __MODULE__ + partition_name = Beacon.Supervisor.partition_name(scope, System.unique_integer([:positive])) + entries_table = Beacon.Supervisor.partition_entries_table(partition_name) + + ^partition_name = + :ets.new(partition_name, [:set, :public, :named_table, read_concurrency: true]) + + ^entries_table = + :ets.new(entries_table, [:set, :public, :named_table, read_concurrency: true]) + + spec = %{ + id: partition_name, + start: {Partition, :start_link, [scope, partition_name, entries_table]}, + type: :supervisor, + restart: :temporary + } + + pid = start_supervised!(spec) + + {:ok, 
partition_name: partition_name, partition_pid: pid} + end + + test "members/2 returns empty list for non-existent group", %{partition_name: partition} do + assert Partition.members(partition, :nonexistent) == [] + end + + test "member_count/2 returns 0 for non-existent group", %{partition_name: partition} do + assert Partition.member_count(partition, :nonexistent) == 0 + end + + test "member?/3 returns false for non-member", %{partition_name: partition} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + refute Partition.member?(partition, :group1, pid) + end + + test "join and query member", %{partition_name: partition} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + assert :ok = Partition.join(partition, :group1, pid) + + assert Partition.member?(partition, :group1, pid) + assert Partition.member_count(partition, :group1) == 1 + assert pid in Partition.members(partition, :group1) + end + + test "join multiple times and query member", %{partition_name: partition} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + assert :ok = Partition.join(partition, :group1, pid) + assert :ok = Partition.join(partition, :group1, pid) + assert :ok = Partition.join(partition, :group1, pid) + + assert Partition.member?(partition, :group1, pid) + assert Partition.member_count(partition, :group1) == 1 + assert pid in Partition.members(partition, :group1) + end + + test "leave removes member", %{partition_name: partition} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid) + assert Partition.member?(partition, :group1, pid) + + Partition.leave(partition, :group1, pid) + refute Partition.member?(partition, :group1, pid) + end + + test "leave multiple times removes member", %{partition_name: partition} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid) + assert Partition.member?(partition, :group1, pid) + + Partition.leave(partition, :group1, pid) + 
Partition.leave(partition, :group1, pid) + Partition.leave(partition, :group1, pid) + refute Partition.member?(partition, :group1, pid) + end + + test "member_counts returns counts for all groups", %{partition_name: partition} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + pid3 = spawn_link(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid1) + Partition.join(partition, :group1, pid2) + Partition.join(partition, :group2, pid3) + + counts = Partition.member_counts(partition) + assert map_size(counts) == 2 + assert counts[:group1] == 2 + assert counts[:group2] == 1 + end + + test "groups returns all groups", %{partition_name: partition} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid1) + Partition.join(partition, :group2, pid2) + + groups = Partition.groups(partition) + assert :group1 in groups + assert :group2 in groups + end + + test "group_counts returns number of groups", %{partition_name: partition} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + pid3 = spawn_link(fn -> Process.sleep(:infinity) end) + pid4 = spawn_link(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid1) + Partition.join(partition, :group1, pid2) + Partition.join(partition, :group2, pid3) + Partition.join(partition, :group3, pid4) + + assert Partition.group_count(partition) == 3 + end + + test "process death removes member from group", %{partition_name: partition} do + pid = spawn(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid) + assert Partition.member?(partition, :group1, pid) + + Process.exit(pid, :kill) + Process.sleep(50) + + refute Partition.member?(partition, :group1, pid) + assert Partition.member_count(partition, :group1) == 0 + end + + test "partition 
recovery monitors processes again", %{ + partition_name: partition, + partition_pid: partition_pid + } do + pid1 = spawn(fn -> Process.sleep(:infinity) end) + pid2 = spawn(fn -> Process.sleep(:infinity) end) + + Partition.join(partition, :group1, pid1) + Partition.join(partition, :group2, pid2) + + monitors = Process.info(partition_pid, [:monitors])[:monitors] |> Enum.map(&elem(&1, 1)) + assert length(monitors) + assert monitors |> Enum.member?(pid1) + assert monitors |> Enum.member?(pid2) + + assert %{{:group1, ^pid1} => _ref1, {:group2, ^pid2} => _ref2} = + :sys.get_state(partition_pid).monitors + + Process.monitor(partition_pid) + Process.exit(partition_pid, :kill) + assert_receive {:DOWN, _ref, :process, ^partition_pid, :killed} + + spec = %{ + id: :recover, + start: + {Partition, :start_link, + [__MODULE__, partition, Beacon.Supervisor.partition_entries_table(partition)]}, + type: :supervisor + } + + partition_pid = start_supervised!(spec) + + assert %{{:group1, ^pid1} => _ref1, {:group2, ^pid2} => _ref2} = + :sys.get_state(partition_pid).monitors + + monitors = Process.info(partition_pid, [:monitors])[:monitors] |> Enum.map(&elem(&1, 1)) + assert length(monitors) + assert monitors |> Enum.member?(pid1) + assert monitors |> Enum.member?(pid2) + + assert Partition.member_count(partition, :group1) == 1 + assert Partition.member_count(partition, :group2) == 1 + + assert Partition.member?(partition, :group1, pid1) + assert Partition.member?(partition, :group2, pid2) + end +end diff --git a/beacon/test/beacon_test.exs b/beacon/test/beacon_test.exs new file mode 100644 index 000000000..f82270e1f --- /dev/null +++ b/beacon/test/beacon_test.exs @@ -0,0 +1,469 @@ +defmodule BeaconTest do + use ExUnit.Case, async: true + + setup do + scope = :"test_scope#{System.unique_integer([:positive])}" + + %{scope: scope} + end + + defp spec(scope, opts) do + %{ + id: scope, + start: {Beacon, :start_link, [scope, opts]}, + type: :supervisor + } + end + + describe "start_link/2" do 
+ test "starts beacon with default partitions", %{scope: scope} do + pid = start_supervised!({Beacon, [scope, []]}) + assert Process.alive?(pid) + assert is_list(Beacon.Supervisor.partitions(scope)) + assert length(Beacon.Supervisor.partitions(scope)) == System.schedulers_online() + end + + test "starts beacon with custom partition count", %{scope: scope} do + pid = start_supervised!(spec(scope, partitions: 3)) + assert Process.alive?(pid) + assert length(Beacon.Supervisor.partitions(scope)) == 3 + end + + test "raises on invalid partition count", %{scope: scope} do + assert_raise ArgumentError, ~r/expected :partitions to be a positive integer/, fn -> + Beacon.start_link(scope, partitions: 0) + end + + assert_raise ArgumentError, ~r/expected :partitions to be a positive integer/, fn -> + Beacon.start_link(scope, partitions: -1) + end + + assert_raise ArgumentError, ~r/expected :partitions to be a positive integer/, fn -> + Beacon.start_link(scope, partitions: :invalid) + end + end + + test "raises on invalid broadcast_interval_in_ms", %{scope: scope} do + assert_raise ArgumentError, + ~r/expected :broadcast_interval_in_ms to be a positive integer/, + fn -> + Beacon.start_link(scope, broadcast_interval_in_ms: 0) + end + + assert_raise ArgumentError, + ~r/expected :broadcast_interval_in_ms to be a positive integer/, + fn -> + Beacon.start_link(scope, broadcast_interval_in_ms: -1) + end + + assert_raise ArgumentError, + ~r/expected :broadcast_interval_in_ms to be a positive integer/, + fn -> + Beacon.start_link(scope, broadcast_interval_in_ms: :invalid) + end + end + end + + describe "join/3 and leave/3" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "can join a group", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + assert :ok = Beacon.join(scope, :group1, pid) + assert Beacon.local_member?(scope, :group1, pid) + end + + test "can leave a group", %{scope: scope} do + pid = spawn_link(fn 
-> Process.sleep(:infinity) end) + assert :ok = Beacon.join(scope, :group1, pid) + assert Beacon.local_member?(scope, :group1, pid) + + assert :ok = Beacon.leave(scope, :group1, pid) + refute Beacon.local_member?(scope, :group1, pid) + end + + test "joining same group twice is idempotent", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + assert :ok = Beacon.join(scope, :group1, pid) + assert :ok = Beacon.join(scope, :group1, pid) + assert Beacon.local_member_count(scope, :group1) == 1 + end + + test "multiple processes can join same group", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + + assert :ok = Beacon.join(scope, :group1, pid1) + assert :ok = Beacon.join(scope, :group1, pid2) + + members = Beacon.local_members(scope, :group1) + assert length(members) == 2 + assert pid1 in members + assert pid2 in members + end + + test "process can join multiple groups", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + assert :ok = Beacon.join(scope, :group1, pid) + assert :ok = Beacon.join(scope, :group2, pid) + + assert Beacon.local_member?(scope, :group1, pid) + assert Beacon.local_member?(scope, :group2, pid) + end + + test "automatically removes member when process dies", %{scope: scope} do + pid = spawn(fn -> Process.sleep(:infinity) end) + assert :ok = Beacon.join(scope, :group1, pid) + assert Beacon.local_member?(scope, :group1, pid) + + Process.exit(pid, :kill) + Process.sleep(50) + + refute Beacon.local_member?(scope, :group1, pid) + assert Beacon.local_member_count(scope, :group1) == 0 + end + end + + describe "local_members/2" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns empty list for non-existent group", %{scope: scope} do + assert Beacon.local_members(scope, :nonexistent) == [] + end + + test "returns all members of a group", %{scope: scope} do + pid1 = 
spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + pid3 = spawn_link(fn -> Process.sleep(:infinity) end) + + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + Beacon.join(scope, :group2, pid3) + + members = Beacon.local_members(scope, :group1) + assert length(members) == 2 + assert pid1 in members + assert pid2 in members + refute pid3 in members + end + end + + describe "local_member_count/2" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns 0 for non-existent group", %{scope: scope} do + assert Beacon.local_member_count(scope, :nonexistent) == 0 + end + + test "returns correct count", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + + assert Beacon.local_member_count(scope, :group1) == 0 + + Beacon.join(scope, :group1, pid1) + assert Beacon.local_member_count(scope, :group1) == 1 + + Beacon.join(scope, :group1, pid2) + assert Beacon.local_member_count(scope, :group1) == 2 + + Beacon.leave(scope, :group1, pid1) + assert Beacon.local_member_count(scope, :group1) == 1 + end + end + + describe "local_member_counts/1" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns empty map when no groups exist", %{scope: scope} do + assert Beacon.local_member_counts(scope) == %{} + end + + test "returns counts for all groups", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + pid3 = spawn_link(fn -> Process.sleep(:infinity) end) + + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + Beacon.join(scope, :group2, pid3) + + assert Beacon.local_member_counts(scope) == %{ + group1: 2, + group2: 1 + } + end + end + + describe "local_member?/3" do + setup %{scope: scope} do + start_supervised!(spec(scope, 
partitions: 2)) + :ok + end + + test "returns false for non-member", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + refute Beacon.local_member?(scope, :group1, pid) + end + + test "returns true for member", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid) + assert Beacon.local_member?(scope, :group1, pid) + end + + test "returns false after leaving", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + + Beacon.join(scope, :group1, pid) + Beacon.leave(scope, :group1, pid) + + refute Beacon.local_member?(scope, :group1, pid) + end + end + + describe "local_groups/1" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns empty list when no groups exist", %{scope: scope} do + assert Beacon.local_groups(scope) == [] + end + + test "returns all groups with members", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group2, pid2) + Beacon.join(scope, :group3, pid1) + + groups = Beacon.local_groups(scope) + assert :group1 in groups + assert :group2 in groups + assert :group3 in groups + assert length(groups) == 3 + end + + test "removes group from list when last member leaves", %{scope: scope} do + pid = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid) + assert :group1 in Beacon.local_groups(scope) + + Beacon.leave(scope, :group1, pid) + refute :group1 in Beacon.local_groups(scope) + end + end + + describe "local_group_count/1" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns 0 when no groups exist", %{scope: scope} do + assert Beacon.local_group_count(scope) == 0 + end + + test "returns correct count of groups", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) 
end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group2, pid2) + Beacon.join(scope, :group3, pid2) + Beacon.join(scope, :group3, pid1) + assert Beacon.local_group_count(scope) == 3 + Beacon.leave(scope, :group2, pid2) + assert Beacon.local_group_count(scope) == 2 + end + end + + describe "member_counts/1" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 2)) + :ok + end + + test "returns local counts when no peers", %{scope: scope} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + + counts = Beacon.member_counts(scope) + assert counts[:group1] == 2 + end + end + + describe "partition distribution" do + setup %{scope: scope} do + start_supervised!(spec(scope, partitions: 4)) + :ok + end + + test "distributes groups across partitions", %{scope: scope} do + # Create multiple processes and verify they're split against different partitions + pids = for _ <- 1..20, do: spawn_link(fn -> Process.sleep(:infinity) end) + + Enum.each(pids, fn pid -> + Beacon.join(scope, pid, pid) + end) + + # Check that multiple partitions are being used + partition_names = Beacon.Supervisor.partitions(scope) + + Enum.map(partition_names, fn partition_name -> + assert Beacon.Partition.member_counts(partition_name) > 1 + end) + end + + test "same group always maps to same partition", %{scope: scope} do + partition1 = Beacon.Supervisor.partition(scope, :my_group) + partition2 = Beacon.Supervisor.partition(scope, :my_group) + partition3 = Beacon.Supervisor.partition(scope, :my_group) + + assert partition1 == partition2 + assert partition2 == partition3 + end + end + + @aux_mod (quote do + defmodule PeerAux do + def start(scope) do + spawn(fn -> + {:ok, _} = Beacon.start_link(scope, broadcast_interval_in_ms: 50) + + pid1 = spawn_link(fn -> Process.sleep(:infinity) 
end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group2, pid2) + Beacon.join(scope, :group3, pid2) + + Process.sleep(:infinity) + end) + end + end + end) + + describe "distributed tests" do + setup do + scope = :"broadcast_scope#{System.unique_integer([:positive])}" + supervisor_pid = start_supervised!(spec(scope, partitions: 2, broadcast_interval_in_ms: 50)) + {:ok, peer, node} = Peer.start_disconnected(aux_mod: @aux_mod) + + ref = + :telemetry_test.attach_event_handlers(self(), [ + [:beacon, scope, :node, :up], + [:beacon, scope, :node, :down] + ]) + + %{scope: scope, supervisor_pid: supervisor_pid, peer: peer, node: node, telemetry_ref: ref} + end + + test "node up", %{scope: scope, peer: peer, node: node, telemetry_ref: telemetry_ref} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + Beacon.join(scope, :group2, pid2) + + true = Node.connect(node) + :peer.call(peer, PeerAux, :start, [scope]) + + assert_receive {[:beacon, ^scope, :node, :up], ^telemetry_ref, %{}, %{node: ^node}} + + # Wait for at least one broadcast interval + Process.sleep(150) + assert Beacon.group_count(scope) == 3 + groups = Beacon.groups(scope) + + assert length(groups) == 3 + assert :group1 in groups + assert :group2 in groups + assert :group3 in groups + + assert Beacon.member_counts(scope) == %{group1: 3, group2: 2, group3: 1} + assert Beacon.member_count(scope, :group1) == 3 + assert Beacon.member_count(scope, :group3, node) == 1 + assert Beacon.member_count(scope, :group1, node()) == 2 + end + + test "node down", %{scope: scope, peer: peer, node: node, telemetry_ref: telemetry_ref} do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + 
Beacon.join(scope, :group2, pid2) + + true = Node.connect(node) + :peer.call(peer, PeerAux, :start, [scope]) + assert_receive {[:beacon, ^scope, :node, :up], ^telemetry_ref, %{}, %{node: ^node}} + # Wait for remote scope to communicate with local + Process.sleep(150) + + true = Node.disconnect(node) + + assert_receive {[:beacon, ^scope, :node, :down], ^telemetry_ref, %{}, %{node: ^node}} + + assert Beacon.member_counts(scope) == %{group1: 2, group2: 1} + assert Beacon.member_count(scope, :group1) == 2 + end + + test "scope restart can recover", %{ + scope: scope, + supervisor_pid: supervisor_pid, + peer: peer, + node: node, + telemetry_ref: telemetry_ref + } do + pid1 = spawn_link(fn -> Process.sleep(:infinity) end) + pid2 = spawn_link(fn -> Process.sleep(:infinity) end) + Beacon.join(scope, :group1, pid1) + Beacon.join(scope, :group1, pid2) + Beacon.join(scope, :group2, pid2) + + true = Node.connect(node) + :peer.call(peer, PeerAux, :start, [scope]) + assert_receive {[:beacon, ^scope, :node, :up], ^telemetry_ref, %{}, %{node: ^node}} + + # Wait for remote scope to communicate with local + Process.sleep(150) + + [ + {1, _, :worker, [Beacon.Partition]}, + {0, _, :worker, [Beacon.Partition]}, + {:scope, scope_pid, :worker, [Beacon.Scope]} + ] = Supervisor.which_children(supervisor_pid) + + # Restart the scope process + Process.monitor(scope_pid) + Process.exit(scope_pid, :kill) + assert_receive {:DOWN, _ref, :process, ^scope_pid, :killed} + # Wait for recovery and communication + Process.sleep(200) + assert Beacon.group_count(scope) == 3 + groups = Beacon.groups(scope) + assert length(groups) == 3 + assert :group1 in groups + assert :group2 in groups + assert :group3 in groups + assert Beacon.member_counts(scope) == %{group1: 3, group2: 2, group3: 1} + end + end +end diff --git a/beacon/test/support/peer.ex b/beacon/test/support/peer.ex new file mode 100644 index 000000000..42ab7e8cd --- /dev/null +++ b/beacon/test/support/peer.ex @@ -0,0 +1,89 @@ +defmodule Peer do 
+ @moduledoc """ + Uses the gist https://gist.github.com/ityonemo/177cbc96f8c8722bfc4d127ff9baec62 to start a node for testing + """ + + @doc """ + Starts a node for testing. + + Can receive an auxiliary module to be evaluated in the node so you are able to setup functions within the test context and outside of the normal code context + + e.g. + ``` + @aux_mod (quote do + defmodule Aux do + def checker(res), do: res + end + end) + + Code.eval_quoted(@aux_mod) + test "clustered call" do + {:ok, node} = Clustered.start(@aux_mod) + assert ok = :rpc.call(node, Aux, :checker, [:ok]) + end + ``` + """ + @spec start(Keyword.t()) :: {:ok, :peer.server_ref(), node} + def start(opts \\ []) do + {:ok, peer, node} = start_disconnected(opts) + + true = Node.connect(node) + + {:ok, peer, node} + end + + @doc """ + Similar to `start/2` but the node is not connected automatically + """ + @spec start_disconnected(Keyword.t()) :: {:ok, :peer.server_ref(), node} + def start_disconnected(opts \\ []) do + extra_config = Keyword.get(opts, :extra_config, []) + name = Keyword.get(opts, :name, :peer.random_name()) + aux_mod = Keyword.get(opts, :aux_mod, nil) + + true = :erlang.set_cookie(:cookie) + + {:ok, pid, node} = + ExUnit.Callbacks.start_supervised(%{ + id: {:peer, name}, + start: + {:peer, :start_link, + [ + %{ + name: name, + host: ~c"127.0.0.1", + longnames: true, + connection: :standard_io + } + ]} + }) + + :peer.call(pid, :erlang, :set_cookie, [:cookie]) + + :ok = :peer.call(pid, :code, :add_paths, [:code.get_path()]) + + for {app_name, _, _} <- Application.loaded_applications(), + {key, value} <- Application.get_all_env(app_name) do + :ok = :peer.call(pid, Application, :put_env, [app_name, key, value]) + end + + # Override with extra config + for {app_name, key, value} <- extra_config do + :ok = :peer.call(pid, Application, :put_env, [app_name, key, value]) + end + + {:ok, _} = :peer.call(pid, Application, :ensure_all_started, [:mix]) + :ok = :peer.call(pid, Mix, :env, 
[Mix.env()]) + + Enum.map( + [:logger, :runtime_tools, :mix, :os_mon, :beacon], + fn app -> {:ok, _} = :peer.call(pid, Application, :ensure_all_started, [app]) end + ) + + if aux_mod do + {{:module, _, _, _}, []} = :peer.call(pid, Code, :eval_quoted, [aux_mod]) + end + + {:ok, pid, node} + end +end diff --git a/beacon/test/test_helper.exs b/beacon/test/test_helper.exs new file mode 100644 index 000000000..eea6cb589 --- /dev/null +++ b/beacon/test/test_helper.exs @@ -0,0 +1,3 @@ +ExUnit.start(capture_log: true) + +:net_kernel.start([:"beacon@127.0.0.1"]) diff --git a/config/config.exs b/config/config.exs index cada8230f..ced9f6f7c 100644 --- a/config/config.exs +++ b/config/config.exs @@ -8,6 +8,7 @@ import Config config :realtime, + websocket_fullsweep_after: 20, ecto_repos: [Realtime.Repo], version: Mix.Project.config()[:version] @@ -80,6 +81,8 @@ config :gen_rpc, # This is used for process sanitation purposes so please make sure to set it in a sufficiently high number async_call_inactivity_timeout: 300_000 +config :prom_ex, :storage_adapter, Realtime.PromEx.Store + # Import environment specific config. This must remain at the bottom # of this file so it overrides the configuration defined above. 
import_config "#{Mix.env()}.exs" diff --git a/config/dev.exs b/config/dev.exs index a438f8ea4..0eff300d8 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -97,6 +97,8 @@ config :phoenix, :plug_init_mode, :runtime # Disable caching to ensure the rendered spec is refreshed config :open_api_spex, :cache_adapter, OpenApiSpex.Plug.NoneCache -config :opentelemetry, traces_exporter: {:otel_exporter_stdout, []} +# Disabled but can print to stdout with: +# config :opentelemetry, traces_exporter: {:otel_exporter_stdout, []} +config :opentelemetry, traces_exporter: :none config :mix_test_watch, clear: true diff --git a/config/prod.exs b/config/prod.exs index bcfc25bc9..146420af9 100644 --- a/config/prod.exs +++ b/config/prod.exs @@ -22,6 +22,7 @@ config :logger, :warning, :project, :external_id, :application_name, + :cluster, :region, :request_id, :sub, diff --git a/config/runtime.exs b/config/runtime.exs index 39310f093..ae85d6611 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -3,17 +3,17 @@ import Config defmodule Env do def get_integer(env, default) do value = System.get_env(env) - if value, do: String.to_integer(env), else: default + if value, do: String.to_integer(value), else: default end def get_charlist(env, default) do value = System.get_env(env) - if value, do: String.to_charlist(env), else: default + if value, do: String.to_charlist(value), else: default end def get_boolean(env, default) do value = System.get_env(env) - if value, do: String.to_existing_atom(value), else: default + if value, do: value |> String.downcase() |> String.to_existing_atom(), else: default end end @@ -32,6 +32,8 @@ db_ssl_ca_cert = System.get_env("DB_SSL_CA_CERT") queue_target = Env.get_integer("DB_QUEUE_TARGET", 5000) queue_interval = Env.get_integer("DB_QUEUE_INTERVAL", 5000) pool_size = Env.get_integer("DB_POOL_SIZE", 5) +master_region = System.get_env("DB_MASTER_REGION") +region = System.get_env("REGION") after_connect_query_args = case 
System.get_env("DB_AFTER_CONNECT_QUERY") do @@ -67,11 +69,14 @@ janitor_run_after_in_ms = Env.get_integer("JANITOR_RUN_AFTER_IN_MS", :timer.minu janitor_children_timeout = Env.get_integer("JANITOR_CHILDREN_TIMEOUT", :timer.seconds(5)) janitor_schedule_timer = Env.get_integer("JANITOR_SCHEDULE_TIMER_IN_MS", :timer.hours(4)) platform = if System.get_env("AWS_EXECUTION_ENV") == "AWS_ECS_FARGATE", do: :aws, else: :fly - -no_channel_timeout_in_ms = - if config_env() == :test, - do: :timer.seconds(3), - else: Env.get_integer("NO_CHANNEL_TIMEOUT_IN_MS", :timer.minutes(10)) +broadcast_pool_size = Env.get_integer("BROADCAST_POOL_SIZE", 10) +pubsub_adapter = System.get_env("PUBSUB_ADAPTER", "gen_rpc") |> String.to_atom() +websocket_max_heap_size = div(Env.get_integer("WEBSOCKET_MAX_HEAP_SIZE", 50_000_000), :erlang.system_info(:wordsize)) +users_scope_shards = Env.get_integer("USERS_SCOPE_SHARDS", 5) +postgres_cdc_scope_shards = Env.get_integer("POSTGRES_CDC_SCOPE_SHARDS", 5) +regional_broadcasting = Env.get_boolean("REGIONAL_BROADCASTING", false) +no_channel_timeout_in_ms = Env.get_integer("NO_CHANNEL_TIMEOUT_IN_MS", :timer.minutes(10)) +measure_traffic_interval_in_ms = Env.get_integer("MEASURE_TRAFFIC_INTERVAL_IN_MS", :timer.seconds(10)) if !(db_version in [nil, "ipv6", "ipv4"]), do: raise("Invalid IP version, please set either ipv6 or ipv4") @@ -91,6 +96,14 @@ socket_options = end end +[_, node_host] = node() |> Atom.to_string() |> String.split("@") + +metrics_tags = %{ + region: region, + host: node_host, + id: Realtime.Nodes.short_node_id_from_name(node()) +} + config :realtime, Realtime.Repo, hostname: default_db_host, username: username, @@ -106,6 +119,7 @@ config :realtime, Realtime.Repo, ssl: ssl_opts config :realtime, + websocket_max_heap_size: websocket_max_heap_size, migration_partition_slots: migration_partition_slots, connect_partition_slots: connect_partition_slots, rebalance_check_interval_in_ms: rebalance_check_interval_in_ms, @@ -120,7 +134,15 @@ config 
:realtime, rpc_timeout: rpc_timeout, max_gen_rpc_clients: max_gen_rpc_clients, no_channel_timeout_in_ms: no_channel_timeout_in_ms, - platform: platform + platform: platform, + pubsub_adapter: pubsub_adapter, + broadcast_pool_size: broadcast_pool_size, + users_scope_shards: users_scope_shards, + postgres_cdc_scope_shards: postgres_cdc_scope_shards, + regional_broadcasting: regional_broadcasting, + master_region: master_region, + metrics_tags: metrics_tags, + measure_traffic_interval_in_ms: measure_traffic_interval_in_ms if config_env() != :test && run_janitor? do config :realtime, @@ -270,7 +292,7 @@ if config_env() != :test do metrics_blocklist: System.get_env("METRICS_TOKEN_BLOCKLIST", "") |> String.split(","), metrics_jwt_secret: System.get_env("METRICS_JWT_SECRET"), db_enc_key: System.get_env("DB_ENC_KEY"), - region: System.get_env("REGION"), + region: region, prom_poll_rate: Env.get_integer("PROM_POLL_RATE", 5000), slot_name_suffix: slot_name_suffix end diff --git a/config/test.exs b/config/test.exs index 4c7c66ae8..89f5f001f 100644 --- a/config/test.exs +++ b/config/test.exs @@ -30,7 +30,11 @@ config :realtime, RealtimeWeb.Endpoint, http: [port: 4002], server: true +# that's what config/runtime.exs expects to see as region +System.put_env("REGION", "us-east-1") + config :realtime, + regional_broadcasting: true, region: "us-east-1", db_enc_key: "1234567890123456", jwt_claim_validators: System.get_env("JWT_CLAIM_VALIDATORS", "{}"), @@ -47,7 +51,7 @@ config :logger, # Configures Elixir's Logger config :logger, :console, format: "$time $metadata[$level] $message\n", - metadata: [:request_id, :project, :external_id, :application_name, :sub, :iss, :exp] + metadata: [:error_code, :request_id, :project, :external_id, :application_name, :sub, :iss, :exp] config :opentelemetry, span_processor: :simple, diff --git a/demo/.env.example b/demo/.env.example deleted file mode 100644 index 25edd5cc0..000000000 --- a/demo/.env.example +++ /dev/null @@ -1,4 +0,0 @@ 
-NEXT_PUBLIC_SUPABASE_URL= -NEXT_PUBLIC_SUPABASE_ANON_KEY= -LOGFLARE_API_KEY= -LOGFLARE_SOURCE_ID= diff --git a/demo/.eslintrc.json b/demo/.eslintrc.json deleted file mode 100644 index bffb357a7..000000000 --- a/demo/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "next/core-web-vitals" -} diff --git a/demo/.gitignore b/demo/.gitignore deleted file mode 100644 index 7d093c39f..000000000 --- a/demo/.gitignore +++ /dev/null @@ -1,38 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. - -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# next.js -/.next/ -/out/ - -# production -/build - -# misc -.DS_Store -*.pem - -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* -.pnpm-debug.log* - -# local env files -.env.local -.env.development.local -.env.test.local -.env.production.local - -# vercel -.vercel - -# typescript -*.tsbuildinfo diff --git a/demo/.prettierignore b/demo/.prettierignore deleted file mode 100644 index ba898f1ef..000000000 --- a/demo/.prettierignore +++ /dev/null @@ -1,3 +0,0 @@ -.next -node_modules -package-lock.json diff --git a/demo/.prettierrc.json b/demo/.prettierrc.json deleted file mode 100644 index 8df6df775..000000000 --- a/demo/.prettierrc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "trailingComma": "es5", - "tabWidth": 2, - "semi": false, - "singleQuote": true, - "printWidth": 100 -} diff --git a/demo/README.md b/demo/README.md deleted file mode 100644 index c87e0421d..000000000 --- a/demo/README.md +++ /dev/null @@ -1,34 +0,0 @@ -This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). - -## Getting Started - -First, run the development server: - -```bash -npm run dev -# or -yarn dev -``` - -Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. - -You can start editing the page by modifying `pages/index.tsx`. 
The page auto-updates as you edit the file. - -[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.ts`. - -The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. - -## Learn More - -To learn more about Next.js, take a look at the following resources: - -- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. -- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. - -You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! - -## Deploy on Vercel - -The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. - -Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
diff --git a/demo/client.ts b/demo/client.ts deleted file mode 100644 index c39f3982d..000000000 --- a/demo/client.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { createClient } from '@supabase/supabase-js' - -const supabaseClient = createClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, - { - realtime: { - params: { - eventsPerSecond: 1000, - }, - }, - } -) - -export default supabaseClient diff --git a/demo/components/Chatbox.tsx b/demo/components/Chatbox.tsx deleted file mode 100644 index 0f9695537..000000000 --- a/demo/components/Chatbox.tsx +++ /dev/null @@ -1,47 +0,0 @@ -import { IconLoader } from '@supabase/ui' -import { FC, RefObject } from 'react' -import { Message } from '../types' - -interface Props { - messages: Message[] - chatboxRef: RefObject - messagesInTransit: string[] - areMessagesFetched: boolean -} - -const Chatbox: FC = ({ messages, chatboxRef, messagesInTransit, areMessagesFetched }) => { - return ( -
-
- {!areMessagesFetched ? ( -
- -

Loading messages

-
- ) : messages.length === 0 && messagesInTransit.length === 0 ? ( -
- Type anything to start chatting 🥳 -
- ) : ( -
- )} - {messages.map((message) => ( -

- {message.message} -

- ))} - {messagesInTransit.map((message, idx: number) => ( -

- {message} -

- ))} -
-
-
- ) -} - -export default Chatbox diff --git a/demo/components/Cursor.tsx b/demo/components/Cursor.tsx deleted file mode 100644 index f6ad32ea6..000000000 --- a/demo/components/Cursor.tsx +++ /dev/null @@ -1,140 +0,0 @@ -import { FC, FormEvent, useEffect, useRef, useState } from 'react' - -interface Props { - x?: number - y?: number - color: string - hue: string - message: string - isTyping: boolean - isCancelled?: boolean - isLocalClient?: boolean - onUpdateMessage?: (message: string) => void -} - -const MAX_MESSAGE_LENGTH = 70 -const MAX_DURATION = 4000 -const MAX_BUBBLE_WIDTH_THRESHOLD = 280 + 50 -const MAX_BUBBLE_HEIGHT_THRESHOLD = 40 + 50 - -const Cursor: FC = ({ - x, - y, - color, - hue, - message, - isTyping, - isCancelled, - isLocalClient, - onUpdateMessage = () => {}, -}) => { - // Don't show cursor for the local client - const _isLocalClient = !x || !y || isLocalClient - const inputRef = useRef() as any - const timeoutRef = useRef() as any - const chatBubbleRef = useRef() as any - - const [flipX, setFlipX] = useState(false) - const [flipY, setFlipY] = useState(false) - const [hideInput, setHideInput] = useState(false) - const [showMessageBubble, setShowMessageBubble] = useState(false) - - useEffect(() => { - if (isTyping) { - setShowMessageBubble(true) - if (timeoutRef.current) clearTimeout(timeoutRef.current) - - if (isLocalClient) { - if (inputRef.current) inputRef.current.focus() - setHideInput(false) - } - } else { - if (!message || isCancelled) { - setShowMessageBubble(false) - } else { - if (timeoutRef.current) clearTimeout(timeoutRef.current) - if (isLocalClient) setHideInput(true) - const timeoutId = setTimeout(() => { - setShowMessageBubble(false) - }, MAX_DURATION) - timeoutRef.current = timeoutId - } - } - }, [isLocalClient, isTyping, isCancelled, message, inputRef]) - - useEffect(() => { - // [Joshen] Experimental: dynamic flipping to ensure that chat - // bubble always stays within the viewport, comment this block - // out if the effect seems 
weird. - setFlipX((x || 0) + MAX_BUBBLE_WIDTH_THRESHOLD >= window.innerWidth) - setFlipY((y || 0) + MAX_BUBBLE_HEIGHT_THRESHOLD >= window.innerHeight) - }, [x, y, isTyping, chatBubbleRef]) - - return ( - <> - {!_isLocalClient && ( - - - - )} -
- {_isLocalClient && !hideInput ? ( - <> - ) => { - const text = e.currentTarget.value - if (text.length <= MAX_MESSAGE_LENGTH) onUpdateMessage(e.currentTarget.value) - }} - /> -

- {message.length}/{MAX_MESSAGE_LENGTH} -

- - ) : message.length ? ( -
{message}
- ) : ( -
-
-
-
-
-
-
-
- )} -
- - ) -} - -export default Cursor diff --git a/demo/components/DarkModeToggle.tsx b/demo/components/DarkModeToggle.tsx deleted file mode 100644 index c84626a90..000000000 --- a/demo/components/DarkModeToggle.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import { IconSun, IconMoon } from '@supabase/ui' -import { useEffect } from 'react' -import { useTheme } from '../lib/ThemeProvider' - -function DarkModeToggle() { - const { isDarkMode, toggleTheme } = useTheme() - - const toggleDarkMode = () => { - localStorage.setItem('supabaseDarkMode', (!isDarkMode).toString()) - toggleTheme() - - const key = localStorage.getItem('supabaseDarkMode') - document.documentElement.className = key === 'true' ? 'dark' : '' - } - - useEffect(() => { - const key = localStorage.getItem('supabaseDarkMode') - if (key && key == 'false') { - document.documentElement.className = '' - } - }, []) - - return ( -
- -
- ) -} - -export default DarkModeToggle diff --git a/demo/components/Loader.tsx b/demo/components/Loader.tsx deleted file mode 100644 index ee7675512..000000000 --- a/demo/components/Loader.tsx +++ /dev/null @@ -1,12 +0,0 @@ -const Loader = () => { - return ( -
- - - - -
- ) -} - -export default Loader diff --git a/demo/components/Users.tsx b/demo/components/Users.tsx deleted file mode 100644 index 67051321b..000000000 --- a/demo/components/Users.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import { FC } from 'react' -import { User } from '../types' - -interface Props { - users: Record -} - -const Users: FC = ({ users }) => { - return ( -
- {Object.entries(users).map(([userId, userData], idx) => { - return ( -
-
-
-
-
- ) - })} -
- ) -} - -export default Users diff --git a/demo/components/WaitlistPopover.tsx b/demo/components/WaitlistPopover.tsx deleted file mode 100644 index 7958af6cf..000000000 --- a/demo/components/WaitlistPopover.tsx +++ /dev/null @@ -1,180 +0,0 @@ -import { FC, useState, memo } from 'react' -import Link from 'next/link' -import Image from 'next/image' -import { - Button, - Form, - Input, - IconMinimize2, - IconMaximize2, - IconGitHub, - IconTwitter, -} from '@supabase/ui' -import supabaseClient from '../client' -import { useTheme } from '../lib/ThemeProvider' - -interface Props {} - -const WaitlistPopover: FC = ({}) => { - const { isDarkMode } = useTheme() - const [isExpanded, setIsExpanded] = useState(true) - const [isSuccess, setIsSuccess] = useState(false) - const [error, setError] = useState() - - const initialValues = { email: '' } - - const getGeneratedTweet = () => { - return `Join me to experience Realtime by Supabase!%0A%0A${window.location.href}` - } - - const onValidate = (values: any) => { - const errors = {} as any - const emailValidateRegex = - /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/ - if (!emailValidateRegex.test(values.email)) errors.email = 'Please enter a valid email' - return errors - } - - const onSubmit = async (values: any, { setSubmitting, resetForm }: any) => { - setIsSuccess(false) - setError(undefined) - setSubmitting(true) - const { error } = await supabaseClient.from('waitlist').insert([{ email: values.email }]) - if (!error) { - resetForm() - setIsSuccess(true) - } else { - setError(error) - } - setSubmitting(false) - } - - return ( -
-
-
- supabase -
-

- / -

-

- Realtime -

-
-
- {isExpanded ? ( - setIsExpanded(false)} - /> - ) : ( - setIsExpanded(true)} - /> - )} -
- -
-
-
-

Realtime

-
-

- Realtime collaborative app to display broadcast, presence, and database listening over - WebSockets -

-
-
- - Realtime Multiplayer by Supabase - Easily build real-time apps that enables user collaboration | Product Hunt - -
-
- - - - - - -
-
- -
- {({ isSubmitting }: any) => { - return ( - <> - - Get early access - , - ]} - /> - {isSuccess && ( -

- Thank you for submitting your interest! -

- )} - {error?.message.includes('duplicate key') && ( -

- Email has already been registered for waitlist -

- )} - {error && !error?.message.includes('duplicate key') && ( -

Unable to register email for waitlist

- )} - - ) - }} -
-
- ) -} - -export default memo(WaitlistPopover) diff --git a/demo/lib/RandomColor.ts b/demo/lib/RandomColor.ts deleted file mode 100644 index beea3b369..000000000 --- a/demo/lib/RandomColor.ts +++ /dev/null @@ -1,56 +0,0 @@ -import sampleSize from 'lodash.samplesize' - -const colors = { - tomato: { - bg: 'var(--colors-tomato9)', - hue: 'var(--colors-tomato7)', - }, - crimson: { - bg: 'var(--colors-crimson9)', - hue: 'var(--colors-crimson7)', - }, - pink: { - bg: 'var(--colors-pink9)', - hue: 'var(--colors-pink7)', - }, - plum: { - bg: 'var(--colors-plum9)', - hue: 'var(--colors-plum7)', - }, - indigo: { - bg: 'var(--colors-indigo9)', - hue: 'var(--colors-indigo7)', - }, - blue: { - bg: 'var(--colors-blue9)', - hue: 'var(--colors-blue7)', - }, - cyan: { - bg: 'var(--colors-cyan9)', - hue: 'var(--colors-cyan7)', - }, - green: { - bg: 'var(--colors-green9)', - hue: 'var(--colors-green7)', - }, - orange: { - bg: 'var(--colors-orange9)', - hue: 'var(--colors-orange7)', - }, -} - -export const getRandomUniqueColor = (currentColors: string[]) => { - const colorNames = Object.values(colors).map((col) => col.bg) - const uniqueColors = colorNames.filter((color: string) => !currentColors.includes(color)) - const uniqueColor = uniqueColors[Math.floor(Math.random() * uniqueColors.length)] - const uniqueColorSet = Object.values(colors).find((color) => color.bg === uniqueColor) - return uniqueColorSet || getRandomColor() -} - -export const getRandomColors = (qty: number) => { - return sampleSize(Object.values(colors), qty) -} - -export const getRandomColor = () => { - return Object.values(colors)[Math.floor(Math.random() * Object.values(colors).length)] -} diff --git a/demo/lib/ThemeProvider.tsx b/demo/lib/ThemeProvider.tsx deleted file mode 100644 index ab023562c..000000000 --- a/demo/lib/ThemeProvider.tsx +++ /dev/null @@ -1,44 +0,0 @@ -import { createContext, useContext, useEffect, useState } from 'react' - -interface UseThemeProps { - isDarkMode?: boolean - toggleTheme: () => 
void -} - -interface ThemeProviderProps { - children?: any -} - -export const ThemeContext = createContext({ - isDarkMode: true, - toggleTheme: () => {}, -}) - -export const useTheme = () => useContext(ThemeContext) - -export const ThemeProvider = ({ children }: ThemeProviderProps) => { - const [isDarkMode, setIsDarkMode] = useState(false) - - useEffect(() => { - const key = localStorage.getItem('supabaseDarkMode') - // Default to dark mode if no preference config - setIsDarkMode(!key || key === 'true') - }, []) - - const toggleTheme = () => { - setIsDarkMode(!isDarkMode) - } - - return ( - <> - - {children} - - - ) -} diff --git a/demo/lib/sendLog.ts b/demo/lib/sendLog.ts deleted file mode 100644 index c3c7e1ab5..000000000 --- a/demo/lib/sendLog.ts +++ /dev/null @@ -1,9 +0,0 @@ -export function sendLog(message: string) { - return fetch('/api/log', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ message }), - }) -} diff --git a/demo/next-env.d.ts b/demo/next-env.d.ts deleted file mode 100644 index 4f11a03dc..000000000 --- a/demo/next-env.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -/// -/// - -// NOTE: This file should not be edited -// see https://nextjs.org/docs/basic-features/typescript for more information. 
diff --git a/demo/next.config.js b/demo/next.config.js deleted file mode 100644 index b556a415f..000000000 --- a/demo/next.config.js +++ /dev/null @@ -1,14 +0,0 @@ -/** @type {import('next').NextConfig} */ -const nextConfig = { - async rewrites() { - return [ - { - source: '/', - destination: '/room', - }, - ] - }, - reactStrictMode: true, -} - -module.exports = nextConfig diff --git a/demo/package-lock.json b/demo/package-lock.json deleted file mode 100644 index bcf6da697..000000000 --- a/demo/package-lock.json +++ /dev/null @@ -1,10978 +0,0 @@ -{ - "name": "demo", - "version": "0.1.2", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "demo", - "version": "0.1.2", - "dependencies": { - "@supabase/supabase-js": "^2.1.0", - "@supabase/ui": "0.37.0-alpha.81", - "lodash.clonedeep": "^4.5.0", - "lodash.samplesize": "^4.2.0", - "lodash.throttle": "^4.1.1", - "next": "^15.2.4", - "react": "17.0.2", - "react-dom": "17.0.2" - }, - "devDependencies": { - "@types/lodash.clonedeep": "^4.5.6", - "@types/lodash.samplesize": "^4.2.6", - "@types/lodash.throttle": "^4.1.6", - "@types/node": "17.0.21", - "@types/react": "17.0.41", - "autoprefixer": "^10.4.4", - "eslint": "8.11.0", - "eslint-config-next": "^12.3.4", - "postcss": "^8.4.31", - "tailwindcss": "^3.0.23", - "typescript": "4.6.2" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.16.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", - "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", - "dependencies": { - "@babel/highlight": "^7.16.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.16.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", - "integrity": 
"sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.16.10", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", - "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", - "dependencies": { - "@babel/helper-validator-identifier": "^7.16.7", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": 
"1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/runtime": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz", - "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==", - "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/runtime-corejs3": { - "version": "7.20.1", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.20.1.tgz", - "integrity": "sha512-CGulbEDcg/ND1Im7fUNRZdGXmX2MTWVVZacQi/6DiKE5HNwZ3aVTm5PV4lO8HHz0B2h8WQyvKKjbX5XgTtydsg==", - "dev": true, - "dependencies": { - "core-js-pure": "^3.25.1", - "regenerator-runtime": "^0.13.10" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/runtime/node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "license": "MIT" - }, - "node_modules/@emnapi/runtime": { - 
"version": "1.4.3", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.3.tgz", - "integrity": "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==", - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", - "integrity": "sha512-bxvbYnBPN1Gibwyp6NrpnFzA3YtRL3BBAyEAFVIpNTm2Rn4Vy87GA5M4aSn3InRrlsbX5N0GW7XIx+U4SAEKdQ==", - "dev": true, - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.3.1", - "globals": "^13.9.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/@headlessui/react": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.4.tgz", - "integrity": "sha512-D8n5yGCF3WIkPsjEYeM8knn9jQ70bigGGb5aUvN6y4BGxcT3OcOQOKcM3zRGllRCZCFxCZyQvYJF6ZE7bQUOyQ==", - "dependencies": { - "client-only": "^0.0.1" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "react": "^16 || ^17 || ^18", - "react-dom": "^16 || ^17 || ^18" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.9.5", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz", - "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==", - "dev": true, - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": 
"sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - "node_modules/@img/sharp-darwin-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", - "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", - "cpu": [ - "arm64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-darwin-arm64": "1.0.4" - } - }, - "node_modules/@img/sharp-darwin-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-darwin-x64": "1.0.4" - } - }, - "node_modules/@img/sharp-libvips-darwin-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", - "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", - "cpu": [ - "arm64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "darwin" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-darwin-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", - "integrity": 
"sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", - "cpu": [ - "x64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "darwin" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-arm": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", - "cpu": [ - "arm" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", - "cpu": [ - "arm64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-s390x": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", - "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", - "cpu": [ - "s390x" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", - "cpu": [ - 
"x64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", - "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", - "cpu": [ - "arm64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linuxmusl-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", - "cpu": [ - "x64" - ], - "license": "LGPL-3.0-or-later", - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-linux-arm": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", - "cpu": [ - "arm" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-arm": "1.0.5" - } - }, - "node_modules/@img/sharp-linux-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", - "cpu": [ - 
"arm64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-arm64": "1.0.4" - } - }, - "node_modules/@img/sharp-linux-s390x": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", - "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", - "cpu": [ - "s390x" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-s390x": "1.0.4" - } - }, - "node_modules/@img/sharp-linux-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-x64": "1.0.4" - } - }, - "node_modules/@img/sharp-linuxmusl-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", - "cpu": [ - "arm64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - 
"optionalDependencies": { - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" - } - }, - "node_modules/@img/sharp-linuxmusl-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", - "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" - } - }, - "node_modules/@img/sharp-wasm32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", - "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", - "cpu": [ - "wasm32" - ], - "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", - "optional": true, - "dependencies": { - "@emnapi/runtime": "^1.2.0" - }, - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-win32-ia32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", - "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", - "cpu": [ - "ia32" - ], - "license": "Apache-2.0 AND LGPL-3.0-or-later", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-win32-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", - "integrity": 
"sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0 AND LGPL-3.0-or-later", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@mertasan/tailwindcss-variables": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@mertasan/tailwindcss-variables/-/tailwindcss-variables-2.5.1.tgz", - "integrity": "sha512-I1Jvpu5fcinGT/yEDL53dRXznFWV4LoTCUVcTvQqA1YH1iAfs72OO/VZdBKPqcxe/lS2nBr/Ikloe+pLsxemmA==", - "dependencies": { - "lodash": "^4.17.21" - }, - "engines": { - "node": ">=12.13.0" - }, - "peerDependencies": { - "autoprefixer": "^10.0.2", - "postcss": "^8.0.9" - } - }, - "node_modules/@next/env": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/env/-/env-15.2.4.tgz", - "integrity": "sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==", - "license": "MIT" - }, - "node_modules/@next/eslint-plugin-next": { - "version": "12.3.4", - "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.4.tgz", - "integrity": "sha512-BFwj8ykJY+zc1/jWANsDprDIu2MgwPOIKxNVnrKvPs+f5TPegrVnem8uScND+1veT4B7F6VeqgaNLFW1Hzl9Og==", - "dev": true, - "dependencies": { - "glob": "7.1.7" - } - }, - "node_modules/@next/eslint-plugin-next/node_modules/glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - 
"node_modules/@next/swc-darwin-arm64": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-15.2.4.tgz", - "integrity": "sha512-1AnMfs655ipJEDC/FHkSr0r3lXBgpqKo4K1kiwfUf3iE68rDFXZ1TtHdMvf7D0hMItgDZ7Vuq3JgNMbt/+3bYw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-darwin-x64": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.2.4.tgz", - "integrity": "sha512-3qK2zb5EwCwxnO2HeO+TRqCubeI/NgCe+kL5dTJlPldV/uwCnUgC7VbEzgmxbfrkbjehL4H9BPztWOEtsoMwew==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-gnu": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.2.4.tgz", - "integrity": "sha512-HFN6GKUcrTWvem8AZN7tT95zPb0GUGv9v0d0iyuTb303vbXkkbHDp/DxufB04jNVD+IN9yHy7y/6Mqq0h0YVaQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-musl": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.2.4.tgz", - "integrity": "sha512-Oioa0SORWLwi35/kVB8aCk5Uq+5/ZIumMK1kJV+jSdazFm2NzPDztsefzdmzzpx5oGCJ6FkUC7vkaUseNTStNA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-gnu": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.2.4.tgz", - "integrity": "sha512-yb5WTRaHdkgOqFOZiu6rHV1fAEK0flVpaIN2HB6kxHVSy/dIajWbThS7qON3W9/SNOH2JWkVCyulgGYekMePuw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - 
"os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-musl": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.2.4.tgz", - "integrity": "sha512-Dcdv/ix6srhkM25fgXiyOieFUkz+fOYkHlydWCtB0xMST6X9XYI3yPDKBZt1xuhOytONsIFJFB08xXYsxUwJLw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-arm64-msvc": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.2.4.tgz", - "integrity": "sha512-dW0i7eukvDxtIhCYkMrZNQfNicPDExt2jPb9AZPpL7cfyUo7QSNl1DjsHjmmKp6qNAqUESyT8YFl/Aw91cNJJg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-x64-msvc": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.2.4.tgz", - "integrity": "sha512-SbnWkJmkS7Xl3kre8SdMF6F/XDh1DTFEhp0jRTj/uB8iPKoU2bb2NDfcu+iifv1+mxQEd1g2vvSxcZbXSKyWiQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "engines": { - "node": ">= 8" - } - }, - 
"node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@radix-ui/colors": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/@radix-ui/colors/-/colors-0.1.8.tgz", - "integrity": "sha512-jwRMXYwC0hUo0mv6wGpuw254Pd9p/R6Td5xsRpOmaWkUHlooNWqVcadgyzlRumMq3xfOTXwJReU0Jv+EIy4Jbw==" - }, - "node_modules/@radix-ui/popper": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/popper/-/popper-0.1.0.tgz", - "integrity": "sha512-uzYeElL3w7SeNMuQpXiFlBhTT+JyaNMCwDfjKkrzugEcYrf5n52PHqncNdQPUtR42hJh8V9FsqyEDbDxkeNjJQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "csstype": "^3.0.4" - } - }, - "node_modules/@radix-ui/primitive": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-0.1.0.tgz", - "integrity": "sha512-tqxZKybwN5Fa3VzZry4G6mXAAb9aAqKmPtnVbZpL0vsBwvOHTBwsjHVPXylocYLwEtBY9SCe665bYnNB515uoA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@radix-ui/react-presence": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-0.1.2.tgz", - "integrity": "sha512-3BRlFZraooIUfRlyN+b/Xs5hq1lanOOo/+3h6Pwu2GMFjkGKKa4Rd51fcqGqnVlbr3jYg+WLuGyAV4KlgqwrQw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": ">=16.8" - } - }, - "node_modules/@radix-ui/react-presence/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": 
"sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@radix-ui/react-presence/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@radix-ui/rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-0.1.1.tgz", - "integrity": "sha512-g3hnE/UcOg7REdewduRPAK88EPuLZtaq7sA9ouu8S+YEtnyFRI16jgv6GZYe3VMoQLL1T171ebmEPtDjyxWLzw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@rushstack/eslint-patch": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", - "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==", - "dev": true - }, - "node_modules/@supabase/functions-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.0.0.tgz", - "integrity": "sha512-ozb7bds2yvf5k7NM2ZzUkxvsx4S4i2eRKFSJetdTADV91T65g4gCzEs9L3LUXSrghcGIkUaon03VPzOrFredqg==", - "dependencies": { - "cross-fetch": "^3.1.5" - } - }, - "node_modules/@supabase/gotrue-js": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/@supabase/gotrue-js/-/gotrue-js-2.3.1.tgz", - "integrity": "sha512-txYVDrKAFXxT4nyVGnW3M9Oid4u3Xe/Na+wTEzwU+IBuPUEz72ZBHNKo6HBKlZNpnlGtgCSciYhH8qFkZYGV3g==", - "dependencies": { - "cross-fetch": "^3.1.5" - } - }, - "node_modules/@supabase/postgrest-js": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.1.0.tgz", - "integrity": "sha512-qkY8TqIu5sJuae8gjeDPjEqPrefzcTraW9PNSVJQHq4TEv98ZmwaXGwBGz0bVL63bqrGA5hqREbQHkANUTXrvA==", - "dependencies": { - "cross-fetch": "^3.1.5" - } - }, - "node_modules/@supabase/realtime-js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.1.0.tgz", - "integrity": "sha512-iplLCofTeYjnx9FIOsIwHLhMp0+7UVyiA4/sCeq40VdOgN9eTIhjEno9Tgh4dJARi4aaXoKfRX1DTxgZaOpPAw==", - "dependencies": { - "@types/phoenix": "^1.5.4", - "websocket": "^1.0.34" - } - }, - "node_modules/@supabase/storage-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.0.0.tgz", - "integrity": "sha512-7kXThdRt/xqnOOvZZxBqNkeX1CFNUWc0hYBJtNN/Uvt8ok9hD14foYmroWrHn046wEYFqUrB9U35JYsfTrvltA==", - "dependencies": { - "cross-fetch": "^3.1.5" - } - }, - "node_modules/@supabase/supabase-js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.1.0.tgz", - "integrity": "sha512-hODrAUDSC6RV6EhwuSMyhaQCF32gij0EBTceuDR+8suJsg7XcyUG0fYgeYecWIvt0nz61xAMY6E+Ywb0tJaAng==", - "dependencies": { - "@supabase/functions-js": "^2.0.0", - "@supabase/gotrue-js": "^2.3.0", - "@supabase/postgrest-js": "^1.1.0", - "@supabase/realtime-js": "^2.1.0", - "@supabase/storage-js": "^2.0.0", - "cross-fetch": "^3.1.5" - } - }, - "node_modules/@supabase/ui": { - "version": "0.37.0-alpha.81", - "resolved": "https://registry.npmjs.org/@supabase/ui/-/ui-0.37.0-alpha.81.tgz", - "integrity": "sha512-CxqdikE6wGw6pGQ6b3vRA8qnvCK20VyeMyy8Z4hJ/Dg2qRfgQqbrv7qS+6A1S8pg657EzCCo0DIH75SijaU8eA==", - "dependencies": { - "@headlessui/react": "^1.0.0", - "@mertasan/tailwindcss-variables": "^2.0.1", - "@radix-ui/colors": "^0.1.8", - "@radix-ui/react-accordion": "^0.1.5", - "@radix-ui/react-collapsible": "^0.1.5", - "@radix-ui/react-context-menu": "^0.1.0", - "@radix-ui/react-dialog": "^0.1.5", - 
"@radix-ui/react-dropdown-menu": "^0.1.4", - "@radix-ui/react-popover": "^0.1.0", - "@radix-ui/react-portal": "^0.1.3", - "@radix-ui/react-tabs": "^0.1.0", - "@tailwindcss/forms": "^0.4.0", - "@tailwindcss/typography": "^0.5.0", - "autoprefixer": "^10.4.2", - "deepmerge": "^4.2.2", - "formik": "^2.2.9", - "lodash": "^4.17.20", - "postcss": "^8.4.5", - "prop-types": "^15.7.2", - "tailwindcss": "^3.0.15", - "tailwindcss-radix": "^1.6.0" - }, - "optionalDependencies": { - "fsevents": "^2.3.2" - }, - "peerDependencies": { - "react": "^16.13.1 || ^17.0.1", - "react-dom": "^16.13.1 || ^17.0.1" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-0.1.6.tgz", - "integrity": "sha512-LOXlqPU6y6EMBopdRIKCWFvMPY1wPTQ4uJiX7ZVxldrMJcM7imBzI3wlRTkPCHZ3FLHmpuw+cQi3du23pzJp1g==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collapsible": "0.1.6", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - 
"@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-0.1.6.tgz", - "integrity": "sha512-Gkf8VuqMc6HTLzA2AxVYnyK6aMczVLpatCjdD9Lj4wlYLXCz9KtiqZYslLMeqnQFLwLyZS0WKX/pQ8j5fioIBw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-context": { - "version": 
"0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-use-controllable-state": { - "version": 
"0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context-menu/-/react-context-menu-0.1.6.tgz", - "integrity": "sha512-0qa6ABaeqD+WYI+8iT0jH0QLLcV8Kv0xI+mZL4FFnG4ec9H0v+yngb5cfBBfs9e/KM8mDzFFpaeegqsQlLNqyQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-menu": "0.1.6", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - 
}, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-0.1.6.tgz", - "integrity": "sha512-ho3+bhpr3oAFkOBJ8VkUb1BcGoiZBB3OmcWPqa6i5RTUKrzNX/d6rauochu2xDlWjiRtpVuiAcsTVOeIC4FbYQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-direction": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": 
"sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - 
"@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "dependencies": { - 
"@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { 
- "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": "sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-use-direction": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-direction/-/react-use-direction-0.1.0.tgz", - "integrity": "sha512-NajpY/An9TCPSfOVkgWIdXJV+VuWl67PxB6kOKYmtNAFHvObzIoh8o0n9sAuwSAyFCZVq211FEf9gvVDRhOyiA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-context-menu/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-0.1.7.tgz", - "integrity": "sha512-jXt8srGhHBRvEr9jhEAiwwJzWCWZoGRJ030aC9ja/gkRJbZdy0iD3FwXf+Ff4RtsZyLUMHW7VUwFOlz3Ixe1Vw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2", - "@radix-ui/react-use-controllable-state": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": 
"^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - 
"@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-scope/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": 
"sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-0.1.6.tgz", - "integrity": "sha512-RZhtzjWwJ4ZBN7D8ek4Zn+ilHzYuYta9yIxFnbC0pfqMnSi67IQNONo1tuuNqtFh9SRHacPKc65zo+kBBlxtdg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-menu": "0.1.6", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": 
"sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-0.1.6.tgz", - "integrity": 
"sha512-ho3+bhpr3oAFkOBJ8VkUb1BcGoiZBB3OmcWPqa6i5RTUKrzNX/d6rauochu2xDlWjiRtpVuiAcsTVOeIC4FbYQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-direction": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - 
"dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": 
"sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": "sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-use-direction": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-direction/-/react-use-direction-0.1.0.tgz", - "integrity": "sha512-NajpY/An9TCPSfOVkgWIdXJV+VuWl67PxB6kOKYmtNAFHvObzIoh8o0n9sAuwSAyFCZVq211FEf9gvVDRhOyiA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - 
"@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-0.1.6.tgz", - "integrity": "sha512-zQzgUqW4RQDb0ItAL1xNW4K4olUrkfV3jeEPs9rG+nsDQurO+W9TT+YZ9H1mmgAJqlthyv1sBRZGdBm4YjtD6Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", 
- "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-body-pointer-events/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": 
"sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-focus-scope/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - 
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - 
"dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": 
"sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-portal": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-0.1.4.tgz", - "integrity": "sha512-MO0wRy2eYRTZ/CyOri9NANCAtAtq89DEtg90gicaTlkCfdqCLEBsLb+/q66BZQTr3xX/Vq01nnVfc/TkCqoqvw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0", - "react-dom": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-portal/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-portal/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - 
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-portal/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-portal/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-0.1.5.tgz", - "integrity": "sha512-ieVQS1TFr0dX1XA8B+CsSFKOE7kcgEaNWWEfItxj9D1GZjn1o3WqPkW+FhQWDAWZLSKCH2PezYF3MNyO41lgJg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - 
"react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-id/node_modules/@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": "sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - 
"node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@supabase/ui/node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0" - } - }, - "node_modules/@swc/counter": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", - "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", - "license": "Apache-2.0" - }, - "node_modules/@swc/helpers": { - "version": "0.5.15", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", - "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.8.0" - } - 
}, - "node_modules/@tailwindcss/forms": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.4.1.tgz", - "integrity": "sha512-gS9xjCmJjUBz/eP12QlENPLnf0tCx68oYE3mri0GMP5jdtVwLbGUNSRpjsp6NzLAZzZy3ueOwrcqB78Ax6Z84A==", - "dependencies": { - "mini-svg-data-uri": "^1.2.3" - }, - "peerDependencies": { - "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1" - } - }, - "node_modules/@tailwindcss/typography": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.8.tgz", - "integrity": "sha512-xGQEp8KXN8Sd8m6R4xYmwxghmswrd0cPnNI2Lc6fmrC3OojysTBJJGSIVwPV56q4t6THFUK3HJ0EaWwpglSxWw==", - "dependencies": { - "lodash.castarray": "^4.4.0", - "lodash.isplainobject": "^4.0.6", - "lodash.merge": "^4.6.2", - "postcss-selector-parser": "6.0.10" - }, - "peerDependencies": { - "tailwindcss": ">=3.0.0 || insiders" - } - }, - "node_modules/@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true - }, - "node_modules/@types/lodash": { - "version": "4.14.180", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.180.tgz", - "integrity": "sha512-XOKXa1KIxtNXgASAnwj7cnttJxS4fksBRywK/9LzRV5YxrF80BXZIGeQSuoESQ/VkUj30Ae0+YcuHc15wJCB2g==", - "dev": true - }, - "node_modules/@types/lodash.clonedeep": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@types/lodash.clonedeep/-/lodash.clonedeep-4.5.6.tgz", - "integrity": "sha512-cE1jYr2dEg1wBImvXlNtp0xDoS79rfEdGozQVgliDZj1uERH4k+rmEMTudP9b4VQ8O6nRb5gPqft0QzEQGMQgA==", - "dev": true, - "dependencies": { - "@types/lodash": "*" - } - }, - "node_modules/@types/lodash.samplesize": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@types/lodash.samplesize/-/lodash.samplesize-4.2.6.tgz", - "integrity": 
"sha512-yBgEuIxVIM+corHdvB+NHgzni1Oc0aEd7acuO/jET0vO2Y2f6sl7vfQlaZKgzcN+ZqWLB6B2VQTKc1T5zQra+Q==", - "dev": true, - "dependencies": { - "@types/lodash": "*" - } - }, - "node_modules/@types/lodash.throttle": { - "version": "4.1.6", - "resolved": "https://registry.npmjs.org/@types/lodash.throttle/-/lodash.throttle-4.1.6.tgz", - "integrity": "sha512-/UIH96i/sIRYGC60NoY72jGkCJtFN5KVPhEMMMTjol65effe1gPn0tycJqV5tlSwMTzX8FqzB5yAj0rfGHTPNg==", - "dev": true, - "dependencies": { - "@types/lodash": "*" - } - }, - "node_modules/@types/node": { - "version": "17.0.21", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", - "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==", - "dev": true - }, - "node_modules/@types/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" - }, - "node_modules/@types/phoenix": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.5.4.tgz", - "integrity": "sha512-L5eZmzw89eXBKkiqVBcJfU1QGx9y+wurRIEgt0cuLH0hwNtVUxtx+6cu0R2STwWj468sjXyBYPYDtGclUd1kjQ==" - }, - "node_modules/@types/prop-types": { - "version": "15.7.4", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz", - "integrity": "sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==", - "devOptional": true - }, - "node_modules/@types/react": { - "version": "17.0.41", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.41.tgz", - "integrity": "sha512-chYZ9ogWUodyC7VUTRBfblysKLjnohhFY9bGLwvnUFFy48+vB9DikmB3lW0qTFmBcKSzmdglcvkHK71IioOlDA==", - "devOptional": true, - "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "node_modules/@types/scheduler": { - 
"version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", - "devOptional": true - }, - "node_modules/@typescript-eslint/parser": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.44.0.tgz", - "integrity": "sha512-H7LCqbZnKqkkgQHaKLGC6KUjt3pjJDx8ETDqmwncyb6PuoigYajyAwBGz08VU/l86dZWZgI4zm5k2VaKqayYyA==", - "dev": true, - "dependencies": { - "@typescript-eslint/scope-manager": "5.44.0", - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/typescript-estree": "5.44.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.44.0.tgz", - "integrity": "sha512-2pKml57KusI0LAhgLKae9kwWeITZ7IsZs77YxyNyIVOwQ1kToyXRaJLl+uDEXzMN5hnobKUOo2gKntK9H1YL8g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/visitor-keys": "5.44.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.44.0.tgz", - "integrity": "sha512-Tp+zDnHmGk4qKR1l+Y1rBvpjpm5tGXX339eAlRBDg+kgZkz9Bw+pqi4dyseOZMsGuSH69fYfPJCBKBrbPCxYFQ==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - 
"type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.44.0.tgz", - "integrity": "sha512-M6Jr+RM7M5zeRj2maSfsZK2660HKAJawv4Ud0xT+yauyvgrsHu276VtXlKDFnEmhG+nVEd0fYZNXGoAgxwDWJw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/visitor-keys": "5.44.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.44.0.tgz", - "integrity": "sha512-a48tLG8/4m62gPFbJ27FxwCOqPKxsb8KC3HkmYoq2As/4YyjQl1jDbRr1s63+g4FS/iIehjmN3L5UjmKva1HzQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.44.0", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": 
"sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "dependencies": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - } - }, - "node_modules/acorn-node/node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", 
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", - "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.1.tgz", - "integrity": "sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA==" - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "node_modules/aria-hidden": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.2.tgz", - "integrity": "sha512-6y/ogyDTk/7YAe91T3E2PR1ALVKyM2QbTio5HwM+N1Q6CMlCKhvClyIjkckBswa0f2xJhjsfzIGa1yVSe1UMVA==", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0", - "react": "^16.9.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/aria-query": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", - "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", - "dev": 
true, - "dependencies": { - "@babel/runtime": "^7.10.2", - "@babel/runtime-corejs3": "^7.10.2" - }, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "is-string": "^1.0.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - 
"node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.tosorted": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" - } - }, - "node_modules/ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==", - "dev": true - }, - "node_modules/autoprefixer": { - "version": "10.4.4", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.4.tgz", - "integrity": "sha512-Tm8JxsB286VweiZ5F0anmbyGiNI3v3wGv3mz9W+cxEDYB/6jbnj6GM9H9mK3wIL8ftgl+C07Lcwb8PG5PCCPzA==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - } - ], - "dependencies": { - "browserslist": "^4.20.2", - "caniuse-lite": "^1.0.30001317", - "fraction.js": "^4.2.0", - "normalize-range": "^0.1.2", - "picocolors": "^1.0.0", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/axe-core": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.5.2.tgz", - "integrity": "sha512-u2MVsXfew5HBvjsczCv+xlwdNnB1oQR9HlAcsejZttNjKKSkeDNVwB1vMThIUIFI9GoT57Vtk8iQLwqOfAkboA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/axobject-query": { - 
"version": "2.2.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", - "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==", - "dev": true - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true - }, - "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "engines": { - "node": ">=8" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.20.2", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.2.tgz", - "integrity": "sha512-CQOBCqp/9pDvDbx3xfMi+86pr4KXIf2FDkTTdeuYw8OxS9t898LA1Khq57gtufFILXpfgsSx5woNgsBgvGjpsA==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - } - ], - "dependencies": { - "caniuse-lite": 
"^1.0.30001317", - "electron-to-chromium": "^1.4.84", - "escalade": "^3.1.1", - "node-releases": "^2.0.2", - "picocolors": "^1.0.0" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/bufferutil": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.7.tgz", - "integrity": "sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw==", - "hasInstallScript": true, - "dependencies": { - "node-gyp-build": "^4.3.0" - }, - "engines": { - "node": ">=6.14.2" - } - }, - "node_modules/busboy": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", - "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", - "dependencies": { - "streamsearch": "^1.1.0" - }, - "engines": { - "node": ">=10.16.0" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase-css": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", - "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/caniuse-lite": { - 
"version": "1.0.30001689", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001689.tgz", - "integrity": "sha512-CmeR2VBycfa+5/jOfnp/NpWPGd06nf1XYiefUvhXFfZE4GkRc9jv+eGPS4nT558WS/8lYCzV8SlANCIPvbWP1g==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ] - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/client-only": { - "version": "0.0.1", - "resolved": 
"https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", - "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" - }, - "node_modules/color": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", - "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", - "license": "MIT", - "optional": true, - "dependencies": { - "color-convert": "^2.0.1", - "color-string": "^1.9.0" - }, - "engines": { - "node": ">=12.5.0" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/color-string": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", - "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", - "license": "MIT", - "optional": true, - "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "node_modules/core-js-pure": { - "version": "3.26.1", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.26.1.tgz", - "integrity": 
"sha512-VVXcDpp/xJ21KdULRq/lXdLzQAtX7+37LzpyfFM973il0tWSsDEoyzG38G14AjTpK9VTfiNM9jnFauq/CpaWGQ==", - "dev": true, - "hasInstallScript": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/cosmiconfig": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", - "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", - "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cross-fetch": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", - "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", - "dependencies": { - "node-fetch": "2.6.7" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/csstype": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.11.tgz", - "integrity": "sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw==" - }, - "node_modules/d": { - "version": "1.0.1", - 
"resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", - "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", - "dependencies": { - "es5-ext": "^0.10.50", - "type": "^1.0.1" - } - }, - "node_modules/damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", - "dev": true - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "node_modules/deepmerge": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", - "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "dev": true, - "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/defined": { - 
"version": "1.0.0", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", - "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" - }, - "node_modules/detect-libc": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/detect-node-es": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" - }, - "node_modules/detective": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.0.tgz", - "integrity": "sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg==", - "dependencies": { - "acorn-node": "^1.6.1", - "defined": "^1.0.0", - "minimist": "^1.1.1" - }, - "bin": { - "detective": "bin/detective.js" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/didyoumean": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", - "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dlv": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", - "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" - }, - 
"node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.4.93", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.93.tgz", - "integrity": "sha512-ywq9Pc5Gwwpv7NG767CtoU8xF3aAUQJjH9//Wy3MBCg4w5JSLbJUq2L8IsCdzPMjvSgxuue9WcVaTOyyxCL0aQ==" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/es-abstract": { - "version": "1.20.4", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.4.tgz", - "integrity": "sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": 
"^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "dev": true, - "dependencies": { - "has": "^1.0.3" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es5-ext": { - "version": "0.10.64", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz", - "integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==", - "hasInstallScript": true, - "dependencies": { - "es6-iterator": "^2.0.3", - "es6-symbol": "^3.1.3", - "esniff": "^2.0.1", - "next-tick": "^1.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/es6-iterator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==", - "dependencies": { - "d": "1", - "es5-ext": "^0.10.35", - "es6-symbol": "^3.1.1" - } - }, - "node_modules/es6-symbol": { - "version": "3.1.3", - 
"resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", - "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", - "dependencies": { - "d": "^1.0.1", - "ext": "^1.1.2" - } - }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.11.0.tgz", - "integrity": "sha512-/KRpd9mIRg2raGxHRGwW9ZywYNAClZrHjdueHcrVDuO3a6bj83eoTirCCk0M0yPwOjWYKHwRVRid+xK4F/GHgA==", - "dev": true, - "dependencies": { - "@eslint/eslintrc": "^1.2.1", - "@humanwhocodes/config-array": "^0.9.2", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.3.1", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^6.0.1", - "globals": "^13.6.0", - "ignore": "^5.2.0", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - 
"natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "regexpp": "^3.2.0", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-next": { - "version": "12.3.4", - "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.4.tgz", - "integrity": "sha512-WuT3gvgi7Bwz00AOmKGhOeqnyA5P29Cdyr0iVjLyfDbk+FANQKcOjFUTZIdyYfe5Tq1x4TGcmoe4CwctGvFjHQ==", - "dev": true, - "dependencies": { - "@next/eslint-plugin-next": "12.3.4", - "@rushstack/eslint-patch": "^1.1.3", - "@typescript-eslint/parser": "^5.21.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-import-resolver-typescript": "^2.7.1", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.5.1", - "eslint-plugin-react": "^7.31.7", - "eslint-plugin-react-hooks": "^4.5.0" - }, - "peerDependencies": { - "eslint": "^7.23.0 || ^8.0.0", - "typescript": ">=3.3.1" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "dev": true, - "dependencies": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-import-resolver-typescript": { - 
"version": "2.7.1", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", - "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", - "dev": true, - "dependencies": { - "debug": "^4.3.4", - "glob": "^7.2.0", - "is-glob": "^4.0.3", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "*", - "eslint-plugin-import": "*" - } - }, - "node_modules/eslint-module-utils": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", - "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", - "dev": true, - "dependencies": { - "debug": "^3.2.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", - "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.4", - "array.prototype.flat": "^1.2.5", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.7.3", - "has": "^1.0.3", - "is-core-module": "^2.8.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.values": "^1.1.5", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "engines": { 
- "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true - }, - "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", - "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", - "dev": true, - "dependencies": { - "@babel/runtime": "^7.18.9", - "aria-query": "^4.2.2", - "array-includes": "^3.1.5", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.4.3", - "axobject-query": "^2.2.0", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - "jsx-ast-utils": "^3.3.2", - "language-tags": "^1.0.5", - "minimatch": "^3.1.2", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=4.0" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-plugin-react": { - "version": "7.31.11", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.11.tgz", - 
"integrity": "sha512-TTvq5JsT5v56wPa9OYHzsrOlHzKZKjV+aLgS+55NJP/cuzdiQPC7PfYoUjMoxlffKtvijpk7vA/jmuqRb9nohw==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", - "doctrine": "^2.1.0", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.3", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.8" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", - "dev": true, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" - } - }, - "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", - "dev": true, - "dependencies": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/eslint-plugin-react/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, 
- "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^2.0.0" - }, - "engines": { - "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=5" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/eslint/node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, 
- "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/esniff": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz", - "integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==", - "dependencies": { - "d": "^1.0.1", - "es5-ext": "^0.10.62", - "event-emitter": "^0.3.5", - "type": "^2.7.2" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esniff/node_modules/type": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz", - "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==" - }, - "node_modules/espree": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.1.tgz", - "integrity": "sha512-bvdyLmJMfwkV3NCRl5ZhJf22zBFo1y8bYh3VYb+bfzqNB4Je68P2sSuXyuFquzWLebHpNd2/d5uv7yoP9ISnGQ==", - "dev": true, - "dependencies": { - "acorn": "^8.7.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dev": true, - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": 
"sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/event-emitter": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", - "integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==", - "dependencies": { - "d": "1", - "es5-ext": "~0.10.14" - } - }, - "node_modules/ext": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz", - "integrity": "sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==", - "dependencies": { - "type": "^2.7.2" - } - }, - "node_modules/ext/node_modules/type": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz", - "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==" - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.2.11", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", - "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - 
"node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, - "node_modules/fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", - "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", - "dev": true - }, - "node_modules/formik": { - "version": "2.2.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.2.9.tgz", - "integrity": "sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==", - "funding": [ - { - "type": "individual", - "url": "https://opencollective.com/formik" - } - ], - "dependencies": { - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^1.10.0" - }, - "peerDependencies": { - "react": ">=16.8.0" - } - }, - "node_modules/formik/node_modules/deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/formik/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - }, - "node_modules/fraction.js": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", - 
"engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://www.patreon.com/infusion" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": 
"sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-nonce": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", - "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", - "engines": { - "node": ">=6" - } - }, - "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/globals": { - "version": "13.13.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", - "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", - "dev": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - 
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "dependencies": { - "react-is": "^16.7.0" - } - }, - "node_modules/ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", - "dev": true, - "engines": { - "node": ">= 
4" - } - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true, - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/internal-slot": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", - "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dependencies": { - "loose-envify": "^1.0.0" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": 
"https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" - }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dev": true, - "dependencies": { - "has-bigints": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-core-module": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", - "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", - "dependencies": { - "has": "^1.0.3" - }, - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", 
- "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" - }, - "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - 
"node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true - }, - "node_modules/json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, - "node_modules/jsx-ast-utils": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", - "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.5", - "object.assign": "^4.1.3" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/language-subtag-registry": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==", - "dev": true - }, - "node_modules/language-tags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", - "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", - "dev": true, - "dependencies": { - "language-subtag-registry": "~0.3.2" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": 
">= 0.8.0" - } - }, - "node_modules/lilconfig": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.5.tgz", - "integrity": "sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg==", - "engines": { - "node": ">=10" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" - }, - "node_modules/lodash.castarray": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz", - "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==" - }, - "node_modules/lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": 
"sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - }, - "node_modules/lodash.samplesize": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.samplesize/-/lodash.samplesize-4.2.0.tgz", - "integrity": "sha1-Rgdi+7KzQikFF0mekNUVhttGX/k=" - }, - "node_modules/lodash.throttle": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", - "integrity": "sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mini-svg-data-uri": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", - "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==", - "bin": { - "mini-svg-data-uri": "cli.js" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", - "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "node_modules/next": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/next/-/next-15.2.4.tgz", - "integrity": "sha512-VwL+LAaPSxEkd3lU2xWbgEOtrM8oedmyhBqaVNmgKB+GvZlCy9rgaEc+y2on0wv+l0oSFqLtYD6dcC1eAedUaQ==", - "license": "MIT", - "dependencies": { - "@next/env": "15.2.4", - "@swc/counter": "0.1.3", - "@swc/helpers": "0.5.15", - "busboy": "1.6.0", - "caniuse-lite": "^1.0.30001579", - "postcss": "8.4.31", - "styled-jsx": "5.1.6" - }, - "bin": { - "next": "dist/bin/next" - }, - "engines": { - "node": "^18.18.0 || ^19.8.0 || >= 20.0.0" - }, - 
"optionalDependencies": { - "@next/swc-darwin-arm64": "15.2.4", - "@next/swc-darwin-x64": "15.2.4", - "@next/swc-linux-arm64-gnu": "15.2.4", - "@next/swc-linux-arm64-musl": "15.2.4", - "@next/swc-linux-x64-gnu": "15.2.4", - "@next/swc-linux-x64-musl": "15.2.4", - "@next/swc-win32-arm64-msvc": "15.2.4", - "@next/swc-win32-x64-msvc": "15.2.4", - "sharp": "^0.33.5" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.1.0", - "@playwright/test": "^1.41.2", - "babel-plugin-react-compiler": "*", - "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", - "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", - "sass": "^1.3.0" - }, - "peerDependenciesMeta": { - "@opentelemetry/api": { - "optional": true - }, - "@playwright/test": { - "optional": true - }, - "babel-plugin-react-compiler": { - "optional": true - }, - "sass": { - "optional": true - } - } - }, - "node_modules/next-tick": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", - "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==" - }, - "node_modules/next/node_modules/postcss": { - "version": "8.4.31", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", - "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": 
"sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-gyp-build": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.5.0.tgz", - "integrity": "sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg==", - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, - "node_modules/node-releases": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.2.tgz", - "integrity": "sha512-XxYDdcQ6eKqp/YjI+tb2C5WM2LgjnZrfYg4vgQt49EK268b6gYCHsBLrK2qvJo4FmCtqmKezb0WZFK4fkrZNsg==" - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-hash": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", - "integrity": 
"sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.entries": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz", - "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", - 
"dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.hasown": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", - "dev": true, - "dependencies": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dev": true, - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - 
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "engines": { - "node": ">=8" - } - }, - "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": 
"sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/postcss": { - "version": "8.4.32", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.32.tgz", - "integrity": "sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-js": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.0.tgz", - "integrity": "sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ==", - "dependencies": { - "camelcase-css": "^2.0.1" - }, - "engines": { - "node": "^12 || ^14 || >= 16" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "postcss": "^8.3.3" - } - }, - "node_modules/postcss-nested": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-5.0.6.tgz", - "integrity": "sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA==", - "dependencies": { - "postcss-selector-parser": "^6.0.6" - }, - "engines": { - "node": ">=12.0" - }, - "funding": { - 
"type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "postcss": "^8.2.14" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.0.10", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", - "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/react": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", - "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", - "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "scheduler": "^0.20.2" - }, - "peerDependencies": { - "react": "17.0.2" - } - }, - "node_modules/react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==" - }, - "node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - 
"node_modules/react-remove-scroll": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", - "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==", - "dependencies": { - "react-remove-scroll-bar": "^2.3.3", - "react-style-singleton": "^2.2.1", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.0", - "use-sidecar": "^1.1.2" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-remove-scroll-bar": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz", - "integrity": "sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A==", - "dependencies": { - "react-style-singleton": "^2.2.1", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-style-singleton": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", - "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", - "dependencies": { - "get-nonce": "^1.0.0", - "invariant": "^2.2.4", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.13.11", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", - "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", - "dev": true - }, - "node_modules/regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", - "dependencies": { - "is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "engines": { - "node": ">=4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/scheduler": { - "version": "0.20.2", - "resolved": 
"https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", - "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", - "dependencies": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "devOptional": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/sharp": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", - "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", - "hasInstallScript": true, - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "color": "^4.2.3", - "detect-libc": "^2.0.3", - "semver": "^7.6.3" - }, - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-darwin-arm64": "0.33.5", - "@img/sharp-darwin-x64": "0.33.5", - "@img/sharp-libvips-darwin-arm64": "1.0.4", - "@img/sharp-libvips-darwin-x64": "1.0.4", - "@img/sharp-libvips-linux-arm": "1.0.5", - "@img/sharp-libvips-linux-arm64": "1.0.4", - "@img/sharp-libvips-linux-s390x": "1.0.4", - "@img/sharp-libvips-linux-x64": "1.0.4", - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", - "@img/sharp-libvips-linuxmusl-x64": "1.0.4", - "@img/sharp-linux-arm": "0.33.5", - "@img/sharp-linux-arm64": "0.33.5", - "@img/sharp-linux-s390x": "0.33.5", - "@img/sharp-linux-x64": "0.33.5", - "@img/sharp-linuxmusl-arm64": "0.33.5", - "@img/sharp-linuxmusl-x64": "0.33.5", - "@img/sharp-wasm32": "0.33.5", - "@img/sharp-win32-ia32": "0.33.5", - "@img/sharp-win32-x64": "0.33.5" - } - }, - "node_modules/shebang-command": { 
- "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", - "license": "MIT", - "optional": true, - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, - "node_modules/simple-swizzle/node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "license": "MIT", - "optional": true - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": 
true, - "engines": { - "node": ">=8" - } - }, - "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/streamsearch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", - "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": 
"sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/styled-jsx": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", - "integrity": "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==", - "license": "MIT", - "dependencies": { - "client-only": "0.0.1" - }, - "engines": { - "node": ">= 12.0.0" - }, - "peerDependencies": { - "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" - }, - "peerDependenciesMeta": { - "@babel/core": { - "optional": true - }, - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/tailwindcss": { - "version": "3.0.23", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.0.23.tgz", - "integrity": "sha512-+OZOV9ubyQ6oI2BXEhzw4HrqvgcARY38xv3zKcjnWtMIZstEsXdI9xftd1iB7+RbOnj2HOEzkA0OyB5BaSxPQA==", - "dependencies": { - "arg": "^5.0.1", - "chalk": "^4.1.2", - "chokidar": "^3.5.3", - "color-name": "^1.1.4", - "cosmiconfig": "^7.0.1", - "detective": "^5.2.0", - "didyoumean": "^1.2.2", - "dlv": "^1.1.3", - "fast-glob": "^3.2.11", - "glob-parent": "^6.0.2", - "is-glob": "^4.0.3", - "normalize-path": "^3.0.0", - "object-hash": "^2.2.0", - "postcss": "^8.4.6", - "postcss-js": "^4.0.0", - "postcss-load-config": "^3.1.0", - "postcss-nested": "5.0.6", - "postcss-selector-parser": "^6.0.9", - "postcss-value-parser": "^4.2.0", - "quick-lru": "^5.1.1", - "resolve": "^1.22.0" - }, - "bin": { - "tailwind": "lib/cli.js", - "tailwindcss": "lib/cli.js" - }, - "engines": { - "node": ">=12.13.0" - }, - "peerDependencies": { - "autoprefixer": "^10.0.2", - "postcss": "^8.0.9" - } - }, - "node_modules/tailwindcss-radix": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/tailwindcss-radix/-/tailwindcss-radix-1.6.0.tgz", - "integrity": "sha512-5oBgGCVGsITMiUVlc6Euj4kt03l8htLJxVT9AXbkFxcJiXLtQxJriFq/8R+3s63OKit/ynCVdkqvlnW6H7iG1g==" - }, - 
"node_modules/tailwindcss/node_modules/postcss-load-config": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.3.tgz", - "integrity": "sha512-5EYgaM9auHGtO//ljHH+v/aC/TQ5LHXtL7bQajNAUBKUVKiYE8rYpFms7+V26D9FncaGe2zwCoPQsFKb5zF/Hw==", - "dependencies": { - "lilconfig": "^2.0.4", - "yaml": "^1.10.2" - }, - "engines": { - "node": ">= 10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "ts-node": ">=9.0.0" - }, - "peerDependenciesMeta": { - "ts-node": { - "optional": true - } - } - }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true - }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", - "dev": true, - "dependencies": { - "@types/json5": "^0.0.29", - "json5": 
"^1.0.1", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - } - }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, - "node_modules/type": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", - "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "engines": { - "node": ">=10" - }, 
- "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "dependencies": { - "is-typedarray": "^1.0.0" - } - }, - "node_modules/typescript": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.2.tgz", - "integrity": "sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/use-callback-ref": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.0.tgz", - "integrity": "sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": 
"^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/use-sidecar": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", - "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", - "dependencies": { - "detect-node-es": "^1.1.0", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/utf-8-validate": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.10.tgz", - "integrity": "sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ==", - "hasInstallScript": true, - "dependencies": { - "node-gyp-build": "^4.3.0" - }, - "engines": { - "node": ">=6.14.2" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "node_modules/v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/websocket": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/websocket/-/websocket-1.0.34.tgz", - "integrity": 
"sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ==", - "dependencies": { - "bufferutil": "^4.0.1", - "debug": "^2.2.0", - "es5-ext": "^0.10.50", - "typedarray-to-buffer": "^3.1.5", - "utf-8-validate": "^5.0.2", - "yaeti": "^0.0.6" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/websocket/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/websocket/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dev": true, - "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": 
"^1.0.5", - "is-symbol": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/word-wrap": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", - "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "engines": { - "node": ">=0.4" - } - }, - "node_modules/yaeti": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/yaeti/-/yaeti-0.0.6.tgz", - "integrity": "sha512-MvQa//+KcZCUkBTIC9blM+CU9J2GzuTytsOUwf2lidtvkx/6gnEp1QvJv34t9vdjhFmha/mUiNDbN0D0mJWdug==", - "engines": { - "node": ">=0.10.32" - } - }, - "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "engines": { - "node": ">= 6" - } - } - }, - "dependencies": { - "@babel/code-frame": { - "version": "7.16.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", - "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", - "requires": { - "@babel/highlight": "^7.16.7" - } - }, - "@babel/helper-validator-identifier": { - "version": "7.16.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", - "integrity": 
"sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==" - }, - "@babel/highlight": { - "version": "7.16.10", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", - "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", - "requires": { - "@babel/helper-validator-identifier": "^7.16.7", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "supports-color": { - "version": "5.5.0", - 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "@babel/runtime": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz", - "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==", - "requires": { - "regenerator-runtime": "^0.14.0" - }, - "dependencies": { - "regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" - } - } - }, - "@babel/runtime-corejs3": { - "version": "7.20.1", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.20.1.tgz", - "integrity": "sha512-CGulbEDcg/ND1Im7fUNRZdGXmX2MTWVVZacQi/6DiKE5HNwZ3aVTm5PV4lO8HHz0B2h8WQyvKKjbX5XgTtydsg==", - "dev": true, - "requires": { - "core-js-pure": "^3.25.1", - "regenerator-runtime": "^0.13.10" - } - }, - "@emnapi/runtime": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.3.tgz", - "integrity": "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==", - "optional": true, - "requires": { - "tslib": "^2.4.0" - } - }, - "@eslint/eslintrc": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", - "integrity": "sha512-bxvbYnBPN1Gibwyp6NrpnFzA3YtRL3BBAyEAFVIpNTm2Rn4Vy87GA5M4aSn3InRrlsbX5N0GW7XIx+U4SAEKdQ==", - "dev": true, - "requires": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.3.1", - "globals": "^13.9.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", - "strip-json-comments": 
"^3.1.1" - } - }, - "@headlessui/react": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.4.tgz", - "integrity": "sha512-D8n5yGCF3WIkPsjEYeM8knn9jQ70bigGGb5aUvN6y4BGxcT3OcOQOKcM3zRGllRCZCFxCZyQvYJF6ZE7bQUOyQ==", - "requires": { - "client-only": "^0.0.1" - } - }, - "@humanwhocodes/config-array": { - "version": "0.9.5", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz", - "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==", - "dev": true, - "requires": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - } - }, - "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - "@img/sharp-darwin-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", - "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", - "optional": true, - "requires": { - "@img/sharp-libvips-darwin-arm64": "1.0.4" - } - }, - "@img/sharp-darwin-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", - "optional": true, - "requires": { - "@img/sharp-libvips-darwin-x64": "1.0.4" - } - }, - "@img/sharp-libvips-darwin-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", - "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", - 
"optional": true - }, - "@img/sharp-libvips-darwin-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", - "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", - "optional": true - }, - "@img/sharp-libvips-linux-arm": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", - "optional": true - }, - "@img/sharp-libvips-linux-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", - "optional": true - }, - "@img/sharp-libvips-linux-s390x": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", - "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", - "optional": true - }, - "@img/sharp-libvips-linux-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", - "optional": true - }, - "@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", - "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", - "optional": true - }, - "@img/sharp-libvips-linuxmusl-x64": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", - "optional": true - }, - "@img/sharp-linux-arm": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", - "optional": true, - "requires": { - "@img/sharp-libvips-linux-arm": "1.0.5" - } - }, - "@img/sharp-linux-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", - "optional": true, - "requires": { - "@img/sharp-libvips-linux-arm64": "1.0.4" - } - }, - "@img/sharp-linux-s390x": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", - "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", - "optional": true, - "requires": { - "@img/sharp-libvips-linux-s390x": "1.0.4" - } - }, - "@img/sharp-linux-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", - "optional": true, - "requires": { - "@img/sharp-libvips-linux-x64": "1.0.4" - } - }, - "@img/sharp-linuxmusl-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", - "optional": true, - "requires": { - "@img/sharp-libvips-linuxmusl-arm64": 
"1.0.4" - } - }, - "@img/sharp-linuxmusl-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", - "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", - "optional": true, - "requires": { - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" - } - }, - "@img/sharp-wasm32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", - "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", - "optional": true, - "requires": { - "@emnapi/runtime": "^1.2.0" - } - }, - "@img/sharp-win32-ia32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", - "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", - "optional": true - }, - "@img/sharp-win32-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", - "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", - "optional": true - }, - "@mertasan/tailwindcss-variables": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@mertasan/tailwindcss-variables/-/tailwindcss-variables-2.5.1.tgz", - "integrity": "sha512-I1Jvpu5fcinGT/yEDL53dRXznFWV4LoTCUVcTvQqA1YH1iAfs72OO/VZdBKPqcxe/lS2nBr/Ikloe+pLsxemmA==", - "requires": { - "lodash": "^4.17.21" - } - }, - "@next/env": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/env/-/env-15.2.4.tgz", - "integrity": "sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==" - }, - "@next/eslint-plugin-next": { - "version": "12.3.4", - "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.4.tgz", - 
"integrity": "sha512-BFwj8ykJY+zc1/jWANsDprDIu2MgwPOIKxNVnrKvPs+f5TPegrVnem8uScND+1veT4B7F6VeqgaNLFW1Hzl9Og==", - "dev": true, - "requires": { - "glob": "7.1.7" - }, - "dependencies": { - "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } - } - }, - "@next/swc-darwin-arm64": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-15.2.4.tgz", - "integrity": "sha512-1AnMfs655ipJEDC/FHkSr0r3lXBgpqKo4K1kiwfUf3iE68rDFXZ1TtHdMvf7D0hMItgDZ7Vuq3JgNMbt/+3bYw==", - "optional": true - }, - "@next/swc-darwin-x64": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.2.4.tgz", - "integrity": "sha512-3qK2zb5EwCwxnO2HeO+TRqCubeI/NgCe+kL5dTJlPldV/uwCnUgC7VbEzgmxbfrkbjehL4H9BPztWOEtsoMwew==", - "optional": true - }, - "@next/swc-linux-arm64-gnu": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.2.4.tgz", - "integrity": "sha512-HFN6GKUcrTWvem8AZN7tT95zPb0GUGv9v0d0iyuTb303vbXkkbHDp/DxufB04jNVD+IN9yHy7y/6Mqq0h0YVaQ==", - "optional": true - }, - "@next/swc-linux-arm64-musl": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.2.4.tgz", - "integrity": "sha512-Oioa0SORWLwi35/kVB8aCk5Uq+5/ZIumMK1kJV+jSdazFm2NzPDztsefzdmzzpx5oGCJ6FkUC7vkaUseNTStNA==", - "optional": true - }, - "@next/swc-linux-x64-gnu": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.2.4.tgz", - "integrity": 
"sha512-yb5WTRaHdkgOqFOZiu6rHV1fAEK0flVpaIN2HB6kxHVSy/dIajWbThS7qON3W9/SNOH2JWkVCyulgGYekMePuw==", - "optional": true - }, - "@next/swc-linux-x64-musl": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.2.4.tgz", - "integrity": "sha512-Dcdv/ix6srhkM25fgXiyOieFUkz+fOYkHlydWCtB0xMST6X9XYI3yPDKBZt1xuhOytONsIFJFB08xXYsxUwJLw==", - "optional": true - }, - "@next/swc-win32-arm64-msvc": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.2.4.tgz", - "integrity": "sha512-dW0i7eukvDxtIhCYkMrZNQfNicPDExt2jPb9AZPpL7cfyUo7QSNl1DjsHjmmKp6qNAqUESyT8YFl/Aw91cNJJg==", - "optional": true - }, - "@next/swc-win32-x64-msvc": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.2.4.tgz", - "integrity": "sha512-SbnWkJmkS7Xl3kre8SdMF6F/XDh1DTFEhp0jRTj/uB8iPKoU2bb2NDfcu+iifv1+mxQEd1g2vvSxcZbXSKyWiQ==", - "optional": true - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@radix-ui/colors": { - "version": "0.1.8", - "resolved": 
"https://registry.npmjs.org/@radix-ui/colors/-/colors-0.1.8.tgz", - "integrity": "sha512-jwRMXYwC0hUo0mv6wGpuw254Pd9p/R6Td5xsRpOmaWkUHlooNWqVcadgyzlRumMq3xfOTXwJReU0Jv+EIy4Jbw==" - }, - "@radix-ui/popper": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/popper/-/popper-0.1.0.tgz", - "integrity": "sha512-uzYeElL3w7SeNMuQpXiFlBhTT+JyaNMCwDfjKkrzugEcYrf5n52PHqncNdQPUtR42hJh8V9FsqyEDbDxkeNjJQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "csstype": "^3.0.4" - } - }, - "@radix-ui/primitive": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-0.1.0.tgz", - "integrity": "sha512-tqxZKybwN5Fa3VzZry4G6mXAAb9aAqKmPtnVbZpL0vsBwvOHTBwsjHVPXylocYLwEtBY9SCe665bYnNB515uoA==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-presence": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-0.1.2.tgz", - "integrity": "sha512-3BRlFZraooIUfRlyN+b/Xs5hq1lanOOo/+3h6Pwu2GMFjkGKKa4Rd51fcqGqnVlbr3jYg+WLuGyAV4KlgqwrQw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/rect": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/@radix-ui/rect/-/rect-0.1.1.tgz", - "integrity": "sha512-g3hnE/UcOg7REdewduRPAK88EPuLZtaq7sA9ouu8S+YEtnyFRI16jgv6GZYe3VMoQLL1T171ebmEPtDjyxWLzw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@rushstack/eslint-patch": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", - "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==", - "dev": true - }, - "@supabase/functions-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.0.0.tgz", - "integrity": "sha512-ozb7bds2yvf5k7NM2ZzUkxvsx4S4i2eRKFSJetdTADV91T65g4gCzEs9L3LUXSrghcGIkUaon03VPzOrFredqg==", - "requires": { - "cross-fetch": "^3.1.5" - } - }, - "@supabase/gotrue-js": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/@supabase/gotrue-js/-/gotrue-js-2.3.1.tgz", - "integrity": "sha512-txYVDrKAFXxT4nyVGnW3M9Oid4u3Xe/Na+wTEzwU+IBuPUEz72ZBHNKo6HBKlZNpnlGtgCSciYhH8qFkZYGV3g==", - "requires": { - "cross-fetch": "^3.1.5" - } - }, - "@supabase/postgrest-js": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.1.0.tgz", - "integrity": "sha512-qkY8TqIu5sJuae8gjeDPjEqPrefzcTraW9PNSVJQHq4TEv98ZmwaXGwBGz0bVL63bqrGA5hqREbQHkANUTXrvA==", - "requires": { - "cross-fetch": "^3.1.5" - } - }, - "@supabase/realtime-js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.1.0.tgz", - "integrity": "sha512-iplLCofTeYjnx9FIOsIwHLhMp0+7UVyiA4/sCeq40VdOgN9eTIhjEno9Tgh4dJARi4aaXoKfRX1DTxgZaOpPAw==", - "requires": { - "@types/phoenix": "^1.5.4", - "websocket": "^1.0.34" - } - }, - "@supabase/storage-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.0.0.tgz", - "integrity": 
"sha512-7kXThdRt/xqnOOvZZxBqNkeX1CFNUWc0hYBJtNN/Uvt8ok9hD14foYmroWrHn046wEYFqUrB9U35JYsfTrvltA==", - "requires": { - "cross-fetch": "^3.1.5" - } - }, - "@supabase/supabase-js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.1.0.tgz", - "integrity": "sha512-hODrAUDSC6RV6EhwuSMyhaQCF32gij0EBTceuDR+8suJsg7XcyUG0fYgeYecWIvt0nz61xAMY6E+Ywb0tJaAng==", - "requires": { - "@supabase/functions-js": "^2.0.0", - "@supabase/gotrue-js": "^2.3.0", - "@supabase/postgrest-js": "^1.1.0", - "@supabase/realtime-js": "^2.1.0", - "@supabase/storage-js": "^2.0.0", - "cross-fetch": "^3.1.5" - } - }, - "@supabase/ui": { - "version": "0.37.0-alpha.81", - "resolved": "https://registry.npmjs.org/@supabase/ui/-/ui-0.37.0-alpha.81.tgz", - "integrity": "sha512-CxqdikE6wGw6pGQ6b3vRA8qnvCK20VyeMyy8Z4hJ/Dg2qRfgQqbrv7qS+6A1S8pg657EzCCo0DIH75SijaU8eA==", - "requires": { - "@headlessui/react": "^1.0.0", - "@mertasan/tailwindcss-variables": "^2.0.1", - "@radix-ui/colors": "^0.1.8", - "@radix-ui/react-accordion": "^0.1.5", - "@radix-ui/react-collapsible": "^0.1.5", - "@radix-ui/react-context-menu": "^0.1.0", - "@radix-ui/react-dialog": "^0.1.5", - "@radix-ui/react-dropdown-menu": "^0.1.4", - "@radix-ui/react-popover": "^0.1.0", - "@radix-ui/react-portal": "^0.1.3", - "@radix-ui/react-tabs": "^0.1.0", - "@tailwindcss/forms": "^0.4.0", - "@tailwindcss/typography": "^0.5.0", - "autoprefixer": "^10.4.2", - "deepmerge": "^4.2.2", - "formik": "^2.2.9", - "fsevents": "^2.3.2", - "lodash": "^4.17.20", - "postcss": "^8.4.5", - "prop-types": "^15.7.2", - "tailwindcss": "^3.0.15", - "tailwindcss-radix": "^1.6.0" - }, - "dependencies": { - "@radix-ui/react-accordion": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-0.1.6.tgz", - "integrity": "sha512-LOXlqPU6y6EMBopdRIKCWFvMPY1wPTQ4uJiX7ZVxldrMJcM7imBzI3wlRTkPCHZ3FLHmpuw+cQi3du23pzJp1g==", - "requires": { - "@babel/runtime": "^7.13.10", - 
"@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collapsible": "0.1.6", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": 
"sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": 
"sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-collapsible": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-0.1.6.tgz", - "integrity": "sha512-Gkf8VuqMc6HTLzA2AxVYnyK6aMczVLpatCjdD9Lj4wlYLXCz9KtiqZYslLMeqnQFLwLyZS0WKX/pQ8j5fioIBw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - 
"integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-context-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context-menu/-/react-context-menu-0.1.6.tgz", - "integrity": "sha512-0qa6ABaeqD+WYI+8iT0jH0QLLcV8Kv0xI+mZL4FFnG4ec9H0v+yngb5cfBBfs9e/KM8mDzFFpaeegqsQlLNqyQ==", - "requires": 
{ - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-menu": "0.1.6", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-0.1.6.tgz", - "integrity": "sha512-ho3+bhpr3oAFkOBJ8VkUb1BcGoiZBB3OmcWPqa6i5RTUKrzNX/d6rauochu2xDlWjiRtpVuiAcsTVOeIC4FbYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-direction": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "dependencies": { - "@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": 
"0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - 
"integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - } - } - }, - "@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": 
"sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "dependencies": { - "@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - } - }, - "@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - } - }, - "@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": 
"sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - } - } - }, - "@radix-ui/react-use-direction": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-direction/-/react-use-direction-0.1.0.tgz", - "integrity": "sha512-NajpY/An9TCPSfOVkgWIdXJV+VuWl67PxB6kOKYmtNAFHvObzIoh8o0n9sAuwSAyFCZVq211FEf9gvVDRhOyiA==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "dependencies": 
{ - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-dialog": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-0.1.7.tgz", - "integrity": "sha512-jXt8srGhHBRvEr9jhEAiwwJzWCWZoGRJ030aC9ja/gkRJbZdy0iD3FwXf+Ff4RtsZyLUMHW7VUwFOlz3Ixe1Vw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2", - "@radix-ui/react-use-controllable-state": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" 
- } - }, - "@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - } - } - }, - "@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": 
"0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - } - }, - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-dropdown-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-0.1.6.tgz", - "integrity": 
"sha512-RZhtzjWwJ4ZBN7D8ek4Zn+ilHzYuYta9yIxFnbC0pfqMnSi67IQNONo1tuuNqtFh9SRHacPKc65zo+kBBlxtdg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-menu": "0.1.6", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-menu": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-0.1.6.tgz", - "integrity": 
"sha512-ho3+bhpr3oAFkOBJ8VkUb1BcGoiZBB3OmcWPqa6i5RTUKrzNX/d6rauochu2xDlWjiRtpVuiAcsTVOeIC4FbYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-direction": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "dependencies": { - "@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - 
"requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - } - } - }, - "@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - }, - "@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "dependencies": { - "@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - } - }, - "@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - } - }, - "@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "requires": { - 
"@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": "sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - } - }, - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-use-direction": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-direction/-/react-use-direction-0.1.0.tgz", - "integrity": "sha512-NajpY/An9TCPSfOVkgWIdXJV+VuWl67PxB6kOKYmtNAFHvObzIoh8o0n9sAuwSAyFCZVq211FEf9gvVDRhOyiA==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": 
"sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-popover": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-0.1.6.tgz", - "integrity": "sha512-zQzgUqW4RQDb0ItAL1xNW4K4olUrkfV3jeEPs9rG+nsDQurO+W9TT+YZ9H1mmgAJqlthyv1sBRZGdBm4YjtD6Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-dismissable-layer": "0.1.5", - "@radix-ui/react-focus-guards": "0.1.0", - "@radix-ui/react-focus-scope": "0.1.4", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-popper": "0.1.4", - "@radix-ui/react-portal": "0.1.4", - "@radix-ui/react-presence": "0.1.2", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-controllable-state": "0.1.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "^2.4.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-dismissable-layer": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-0.1.5.tgz", - "integrity": "sha512-J+fYWijkX4M4QKwf9dtu1oC0U6e6CEl8WhBp3Ad23yz2Hia0XCo6Pk/mp5CAFy4QBtQedTSkhW05AdtSOEoajQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-body-pointer-events": "0.1.1", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-escape-keydown": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-body-pointer-events": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-body-pointer-events/-/react-use-body-pointer-events-0.1.1.tgz", - "integrity": "sha512-R8leV2AWmJokTmERM8cMXFHWSiv/fzOLhG/JLmRBhLTAzOj37EQizssq4oW0Z29VcZy2tODMi9Pk/htxwb+xpA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - 
"@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-use-escape-keydown": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-0.1.0.tgz", - "integrity": "sha512-tDLZbTGFmvXaazUXXv8kYbiCcbAE8yKgng9s95d8fCO+Eundv0Jngbn/hKPhDDs4jj9ChwRX5cDDnlaN+ugYYQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - } - } - } - }, - "@radix-ui/react-focus-guards": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-0.1.0.tgz", - "integrity": "sha512-kRx/swAjEfBpQ3ns7J3H4uxpXuWCqN7MpALiSDOXiyo2vkWv0L9sxvbpZeTulINuE3CGMzicVMuNc/VWXjFKOg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-focus-scope": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-0.1.4.tgz", - "integrity": "sha512-fbA4ES3H4Wkxp+OeLhvN6SwL7mXNn/aBtUf7DRYxY9+Akrf7dRxl2ck4lgcpPsSg3zSDsEwLcY+h5cmj5yvlug==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-popper": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-0.1.4.tgz", - "integrity": "sha512-18gDYof97t8UQa7zwklG1Dr8jIdj3u+rVOQLzPi9f5i1YQak/pVGkaqw8aY+iDUknKKuZniTk/7jbAJUYlKyOw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/popper": "0.1.0", - "@radix-ui/react-arrow": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-rect": "0.1.1", - "@radix-ui/react-use-size": "0.1.1", - "@radix-ui/rect": "0.1.1" - }, - "dependencies": { - "@radix-ui/react-arrow": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-0.1.4.tgz", - "integrity": "sha512-BB6XzAb7Ml7+wwpFdYVtZpK1BlMgqyafSQNGzhIpSZ4uXvXOHPlR5GP8M449JkeQzgQjv9Mp1AsJxFC0KuOtuA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4" - } - }, - "@radix-ui/react-use-rect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-0.1.1.tgz", - "integrity": "sha512-kHNNXAsP3/PeszEmM/nxBBS9Jbo93sO+xuMTcRfwzXsmxT5gDXQzAiKbZQ0EecCPtJIzqvr7dlaQi/aP1PKYqQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "0.1.1" - } - }, - 
"@radix-ui/react-use-size": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-0.1.1.tgz", - "integrity": "sha512-pTgWM5qKBu6C7kfKxrKPoBI2zZYZmp2cSXzpUiGM3qEBQlMLtYhaY2JXdXUCxz+XmD1YEjc8oRwvyfsD4AG4WA==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-portal": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-0.1.4.tgz", - 
"integrity": "sha512-MO0wRy2eYRTZ/CyOri9NANCAtAtq89DEtg90gicaTlkCfdqCLEBsLb+/q66BZQTr3xX/Vq01nnVfc/TkCqoqvw==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-tabs": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-0.1.5.tgz", - "integrity": "sha512-ieVQS1TFr0dX1XA8B+CsSFKOE7kcgEaNWWEfItxj9D1GZjn1o3WqPkW+FhQWDAWZLSKCH2PezYF3MNyO41lgJg==", - "requires": { - 
"@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-roving-focus": "0.1.5", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-context": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-0.1.1.tgz", - "integrity": "sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-id": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-0.1.5.tgz", - "integrity": "sha512-IPc4H/63bes0IZ1GJJozSEkSWcDyhNGtKFWUpJ+XtaLyQ1X3x7Mf6fWwWhDcpqlYEP+5WtAvfqcyEsyjP+ZhBQ==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-layout-effect": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz", - "integrity": "sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-primitive": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz", - "integrity": "sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": 
"^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-compose-refs": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - }, - "@radix-ui/react-roving-focus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-0.1.5.tgz", - "integrity": "sha512-ClwKPS5JZE+PaHCoW7eu1onvE61pDv4kO8W4t5Ra3qMFQiTJLZMdpBQUhksN//DaVygoLirz4Samdr5Y1x1FSA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "0.1.0", - "@radix-ui/react-collection": "0.1.4", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-id": "0.1.5", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-use-callback-ref": "0.1.0", - "@radix-ui/react-use-controllable-state": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-collection": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-0.1.4.tgz", - "integrity": "sha512-3muGI15IdgaDFjOcO7xX8a35HQRBRF6LH9pS6UCeZeRmbslkVeHyJRQr2rzICBUoX7zgIA0kXyMDbpQnJGyJTA==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0", - "@radix-ui/react-context": "0.1.1", - "@radix-ui/react-primitive": "0.1.4", - "@radix-ui/react-slot": "0.1.2" - }, - "dependencies": { - "@radix-ui/react-slot": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-0.1.2.tgz", - "integrity": "sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "0.1.0" - } - } - } - }, - "@radix-ui/react-compose-refs": { - 
"version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz", - "integrity": "sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg==", - "requires": { - "@babel/runtime": "^7.13.10" - } - }, - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - }, - "@radix-ui/react-use-controllable-state": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-0.1.0.tgz", - "integrity": "sha512-zv7CX/PgsRl46a52Tl45TwqwVJdmqnlQEQhaYMz/yBOD2sx2gCkCFSoF/z9mpnYWmS6DTLNTg5lIps3fV6EnXg==", - "requires": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "0.1.0" - }, - "dependencies": { - "@radix-ui/react-use-callback-ref": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz", - "integrity": "sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw==", - "requires": { - "@babel/runtime": "^7.13.10" - } - } - } - } - } - } - } - }, - "@swc/counter": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", - "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==" - }, - "@swc/helpers": { - "version": "0.5.15", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", - "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", - "requires": { - "tslib": "^2.8.0" - } - }, - "@tailwindcss/forms": { - "version": 
"0.4.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.4.1.tgz", - "integrity": "sha512-gS9xjCmJjUBz/eP12QlENPLnf0tCx68oYE3mri0GMP5jdtVwLbGUNSRpjsp6NzLAZzZy3ueOwrcqB78Ax6Z84A==", - "requires": { - "mini-svg-data-uri": "^1.2.3" - } - }, - "@tailwindcss/typography": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.8.tgz", - "integrity": "sha512-xGQEp8KXN8Sd8m6R4xYmwxghmswrd0cPnNI2Lc6fmrC3OojysTBJJGSIVwPV56q4t6THFUK3HJ0EaWwpglSxWw==", - "requires": { - "lodash.castarray": "^4.4.0", - "lodash.isplainobject": "^4.0.6", - "lodash.merge": "^4.6.2", - "postcss-selector-parser": "6.0.10" - } - }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true - }, - "@types/lodash": { - "version": "4.14.180", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.180.tgz", - "integrity": "sha512-XOKXa1KIxtNXgASAnwj7cnttJxS4fksBRywK/9LzRV5YxrF80BXZIGeQSuoESQ/VkUj30Ae0+YcuHc15wJCB2g==", - "dev": true - }, - "@types/lodash.clonedeep": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@types/lodash.clonedeep/-/lodash.clonedeep-4.5.6.tgz", - "integrity": "sha512-cE1jYr2dEg1wBImvXlNtp0xDoS79rfEdGozQVgliDZj1uERH4k+rmEMTudP9b4VQ8O6nRb5gPqft0QzEQGMQgA==", - "dev": true, - "requires": { - "@types/lodash": "*" - } - }, - "@types/lodash.samplesize": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@types/lodash.samplesize/-/lodash.samplesize-4.2.6.tgz", - "integrity": "sha512-yBgEuIxVIM+corHdvB+NHgzni1Oc0aEd7acuO/jET0vO2Y2f6sl7vfQlaZKgzcN+ZqWLB6B2VQTKc1T5zQra+Q==", - "dev": true, - "requires": { - "@types/lodash": "*" - } - }, - "@types/lodash.throttle": { - "version": "4.1.6", - "resolved": "https://registry.npmjs.org/@types/lodash.throttle/-/lodash.throttle-4.1.6.tgz", - 
"integrity": "sha512-/UIH96i/sIRYGC60NoY72jGkCJtFN5KVPhEMMMTjol65effe1gPn0tycJqV5tlSwMTzX8FqzB5yAj0rfGHTPNg==", - "dev": true, - "requires": { - "@types/lodash": "*" - } - }, - "@types/node": { - "version": "17.0.21", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", - "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==", - "dev": true - }, - "@types/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" - }, - "@types/phoenix": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.5.4.tgz", - "integrity": "sha512-L5eZmzw89eXBKkiqVBcJfU1QGx9y+wurRIEgt0cuLH0hwNtVUxtx+6cu0R2STwWj468sjXyBYPYDtGclUd1kjQ==" - }, - "@types/prop-types": { - "version": "15.7.4", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz", - "integrity": "sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==", - "devOptional": true - }, - "@types/react": { - "version": "17.0.41", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.41.tgz", - "integrity": "sha512-chYZ9ogWUodyC7VUTRBfblysKLjnohhFY9bGLwvnUFFy48+vB9DikmB3lW0qTFmBcKSzmdglcvkHK71IioOlDA==", - "devOptional": true, - "requires": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "@types/scheduler": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", - "devOptional": true - }, - "@typescript-eslint/parser": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.44.0.tgz", - "integrity": 
"sha512-H7LCqbZnKqkkgQHaKLGC6KUjt3pjJDx8ETDqmwncyb6PuoigYajyAwBGz08VU/l86dZWZgI4zm5k2VaKqayYyA==", - "dev": true, - "requires": { - "@typescript-eslint/scope-manager": "5.44.0", - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/typescript-estree": "5.44.0", - "debug": "^4.3.4" - } - }, - "@typescript-eslint/scope-manager": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.44.0.tgz", - "integrity": "sha512-2pKml57KusI0LAhgLKae9kwWeITZ7IsZs77YxyNyIVOwQ1kToyXRaJLl+uDEXzMN5hnobKUOo2gKntK9H1YL8g==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/visitor-keys": "5.44.0" - } - }, - "@typescript-eslint/types": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.44.0.tgz", - "integrity": "sha512-Tp+zDnHmGk4qKR1l+Y1rBvpjpm5tGXX339eAlRBDg+kgZkz9Bw+pqi4dyseOZMsGuSH69fYfPJCBKBrbPCxYFQ==", - "dev": true - }, - "@typescript-eslint/typescript-estree": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.44.0.tgz", - "integrity": "sha512-M6Jr+RM7M5zeRj2maSfsZK2660HKAJawv4Ud0xT+yauyvgrsHu276VtXlKDFnEmhG+nVEd0fYZNXGoAgxwDWJw==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.44.0", - "@typescript-eslint/visitor-keys": "5.44.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.44.0.tgz", - "integrity": "sha512-a48tLG8/4m62gPFbJ27FxwCOqPKxsb8KC3HkmYoq2As/4YyjQl1jDbRr1s63+g4FS/iIehjmN3L5UjmKva1HzQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.44.0", - "eslint-visitor-keys": "^3.3.0" - } - }, - "acorn": { - "version": "8.7.0", - "resolved": 
"https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", - "dev": true - }, - "acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "requires": {} - }, - "acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "requires": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - }, - "dependencies": { - "acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" - } - } - }, - "acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==" - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "anymatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", - "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", - "requires": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - } - }, - "arg": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.1.tgz", - "integrity": "sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA==" - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "aria-hidden": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.2.tgz", - "integrity": "sha512-6y/ogyDTk/7YAe91T3E2PR1ALVKyM2QbTio5HwM+N1Q6CMlCKhvClyIjkckBswa0f2xJhjsfzIGa1yVSe1UMVA==", - "requires": { - "tslib": "^2.0.0" - } - }, - "aria-query": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", - "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", - "dev": true, - "requires": { - "@babel/runtime": "^7.10.2", - "@babel/runtime-corejs3": "^7.10.2" - } - }, - "array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": 
"^1.1.3", - "is-string": "^1.0.7" - } - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true - }, - "array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - } - }, - "array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - } - }, - "array.prototype.tosorted": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" - } - }, - "ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==", - "dev": true - }, - "autoprefixer": { - "version": "10.4.4", - "resolved": 
"https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.4.tgz", - "integrity": "sha512-Tm8JxsB286VweiZ5F0anmbyGiNI3v3wGv3mz9W+cxEDYB/6jbnj6GM9H9mK3wIL8ftgl+C07Lcwb8PG5PCCPzA==", - "requires": { - "browserslist": "^4.20.2", - "caniuse-lite": "^1.0.30001317", - "fraction.js": "^4.2.0", - "normalize-range": "^0.1.2", - "picocolors": "^1.0.0", - "postcss-value-parser": "^4.2.0" - } - }, - "axe-core": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.5.2.tgz", - "integrity": "sha512-u2MVsXfew5HBvjsczCv+xlwdNnB1oQR9HlAcsejZttNjKKSkeDNVwB1vMThIUIFI9GoT57Vtk8iQLwqOfAkboA==", - "dev": true - }, - "axobject-query": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", - "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==", - "dev": true - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true - }, - "binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "requires": { - 
"fill-range": "^7.1.1" - } - }, - "browserslist": { - "version": "4.20.2", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.2.tgz", - "integrity": "sha512-CQOBCqp/9pDvDbx3xfMi+86pr4KXIf2FDkTTdeuYw8OxS9t898LA1Khq57gtufFILXpfgsSx5woNgsBgvGjpsA==", - "requires": { - "caniuse-lite": "^1.0.30001317", - "electron-to-chromium": "^1.4.84", - "escalade": "^3.1.1", - "node-releases": "^2.0.2", - "picocolors": "^1.0.0" - } - }, - "bufferutil": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.7.tgz", - "integrity": "sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw==", - "requires": { - "node-gyp-build": "^4.3.0" - } - }, - "busboy": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", - "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", - "requires": { - "streamsearch": "^1.1.0" - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - }, - "camelcase-css": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", - "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" - }, - "caniuse-lite": { - "version": "1.0.30001689", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001689.tgz", - "integrity": 
"sha512-CmeR2VBycfa+5/jOfnp/NpWPGd06nf1XYiefUvhXFfZE4GkRc9jv+eGPS4nT558WS/8lYCzV8SlANCIPvbWP1g==" - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", - "requires": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "fsevents": "~2.3.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "dependencies": { - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "requires": { - "is-glob": "^4.0.1" - } - } - } - }, - "client-only": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", - "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" - }, - "color": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", - "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", - "optional": true, - "requires": { - "color-convert": "^2.0.1", - "color-string": "^1.9.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" 
- } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "color-string": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", - "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", - "optional": true, - "requires": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "core-js-pure": { - "version": "3.26.1", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.26.1.tgz", - "integrity": "sha512-VVXcDpp/xJ21KdULRq/lXdLzQAtX7+37LzpyfFM973il0tWSsDEoyzG38G14AjTpK9VTfiNM9jnFauq/CpaWGQ==", - "dev": true - }, - "cosmiconfig": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", - "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", - "requires": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" - } - }, - "cross-fetch": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", - "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", - "requires": { - "node-fetch": "2.6.7" - } - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - 
"shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" - }, - "csstype": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.11.tgz", - "integrity": "sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw==" - }, - "d": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", - "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", - "requires": { - "es5-ext": "^0.10.50", - "type": "^1.0.1" - } - }, - "damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", - "dev": true - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "deepmerge": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", - "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" - }, - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": 
"sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "dev": true, - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - }, - "defined": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", - "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" - }, - "detect-libc": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", - "optional": true - }, - "detect-node-es": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" - }, - "detective": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.0.tgz", - "integrity": "sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg==", - "requires": { - "acorn-node": "^1.6.1", - "defined": "^1.0.0", - "minimist": "^1.1.1" - } - }, - "didyoumean": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", - "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "requires": { - "path-type": "^4.0.0" - } - }, - "dlv": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", - "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" - }, - "doctrine": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "electron-to-chromium": { - "version": "1.4.93", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.93.tgz", - "integrity": "sha512-ywq9Pc5Gwwpv7NG767CtoU8xF3aAUQJjH9//Wy3MBCg4w5JSLbJUq2L8IsCdzPMjvSgxuue9WcVaTOyyxCL0aQ==" - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es-abstract": { - "version": "1.20.4", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.4.tgz", - "integrity": "sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - 
"string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - } - }, - "es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, - "es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "es5-ext": { - "version": "0.10.64", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz", - "integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==", - "requires": { - "es6-iterator": "^2.0.3", - "es6-symbol": "^3.1.3", - "esniff": "^2.0.1", - "next-tick": "^1.1.0" - } - }, - "es6-iterator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==", - "requires": { - "d": "1", - "es5-ext": "^0.10.35", - "es6-symbol": "^3.1.1" - } - }, - "es6-symbol": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", - "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", - "requires": { - "d": "^1.0.1", - "ext": "^1.1.2" - } - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" - }, - "escape-string-regexp": { - 
"version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true - }, - "eslint": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.11.0.tgz", - "integrity": "sha512-/KRpd9mIRg2raGxHRGwW9ZywYNAClZrHjdueHcrVDuO3a6bj83eoTirCCk0M0yPwOjWYKHwRVRid+xK4F/GHgA==", - "dev": true, - "requires": { - "@eslint/eslintrc": "^1.2.1", - "@humanwhocodes/config-array": "^0.9.2", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.3.1", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^6.0.1", - "globals": "^13.6.0", - "ignore": "^5.2.0", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "regexpp": "^3.2.0", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "dependencies": { - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - } - } - }, - "eslint-config-next": { - "version": "12.3.4", - "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.4.tgz", - "integrity": 
"sha512-WuT3gvgi7Bwz00AOmKGhOeqnyA5P29Cdyr0iVjLyfDbk+FANQKcOjFUTZIdyYfe5Tq1x4TGcmoe4CwctGvFjHQ==", - "dev": true, - "requires": { - "@next/eslint-plugin-next": "12.3.4", - "@rushstack/eslint-patch": "^1.1.3", - "@typescript-eslint/parser": "^5.21.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-import-resolver-typescript": "^2.7.1", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.5.1", - "eslint-plugin-react": "^7.31.7", - "eslint-plugin-react-hooks": "^4.5.0" - } - }, - "eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "dev": true, - "requires": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-import-resolver-typescript": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", - "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", - "dev": true, - "requires": { - "debug": "^4.3.4", - "glob": "^7.2.0", - "is-glob": "^4.0.3", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - } - }, - "eslint-module-utils": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", - "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", - "dev": true, - "requires": { - "debug": "^3.2.7" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - 
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-plugin-import": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", - "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", - "dev": true, - "requires": { - "array-includes": "^3.1.4", - "array.prototype.flat": "^1.2.5", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.7.3", - "has": "^1.0.3", - "is-core-module": "^2.8.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.values": "^1.1.5", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true - } - } - }, - "eslint-plugin-jsx-a11y": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", - "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", - "dev": true, - "requires": { - "@babel/runtime": "^7.18.9", - "aria-query": "^4.2.2", - "array-includes": "^3.1.5", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.4.3", - "axobject-query": "^2.2.0", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - 
"jsx-ast-utils": "^3.3.2", - "language-tags": "^1.0.5", - "minimatch": "^3.1.2", - "semver": "^6.3.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } - } - }, - "eslint-plugin-react": { - "version": "7.31.11", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.11.tgz", - "integrity": "sha512-TTvq5JsT5v56wPa9OYHzsrOlHzKZKjV+aLgS+55NJP/cuzdiQPC7PfYoUjMoxlffKtvijpk7vA/jmuqRb9nohw==", - "dev": true, - "requires": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", - "doctrine": "^2.1.0", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.3", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.8" - }, - "dependencies": { - "resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", - "dev": true, - "requires": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } - }, - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } - } - }, - "eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": 
"sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", - "dev": true, - "requires": {} - }, - "eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", - "dev": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - } - }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true - } - } - }, - "eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "dev": true - }, - "esniff": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz", - "integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==", - "requires": { - "d": "^1.0.1", - "es5-ext": "^0.10.62", - "event-emitter": "^0.3.5", - "type": "^2.7.2" - }, - "dependencies": { - "type": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz", - "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==" - } - } - }, - "espree": { - "version": "9.3.1", - "resolved": 
"https://registry.npmjs.org/espree/-/espree-9.3.1.tgz", - "integrity": "sha512-bvdyLmJMfwkV3NCRl5ZhJf22zBFo1y8bYh3VYb+bfzqNB4Je68P2sSuXyuFquzWLebHpNd2/d5uv7yoP9ISnGQ==", - "dev": true, - "requires": { - "acorn": "^8.7.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^3.3.0" - } - }, - "esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dev": true, - "requires": { - "estraverse": "^5.1.0" - } - }, - "esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "requires": { - "estraverse": "^5.2.0" - } - }, - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true - }, - "event-emitter": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", - "integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==", - "requires": { - "d": "1", - "es5-ext": "~0.10.14" - } - }, - "ext": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz", - "integrity": "sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==", - "requires": { - "type": "^2.7.2" - }, - "dependencies": { - "type": { - "version": "2.7.2", - "resolved": 
"https://registry.npmjs.org/type/-/type-2.7.2.tgz", - "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==" - } - } - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-glob": { - "version": "3.2.11", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", - "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "dependencies": { - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "requires": { - "is-glob": "^4.0.1" - } - } - } - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, - "fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", - "requires": { - "reusify": "^1.0.4" - } - }, - "file-entry-cache": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "requires": { - "flat-cache": "^3.0.4" - } - }, - "fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "requires": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - } - }, - "flatted": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", - "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", - "dev": true - }, - "formik": { - "version": "2.2.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.2.9.tgz", - "integrity": "sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==", - "requires": { - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^1.10.0" - }, - "dependencies": { - "deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==" - }, - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": 
"sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "fraction.js": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==" - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "optional": true - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - } - }, - "functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true - }, - "functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "dev": true - }, - 
"get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "get-nonce": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", - "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==" - }, - "get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - } - }, - "glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "requires": { - "is-glob": "^4.0.3" - } - }, - "globals": { - "version": "13.13.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", - "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", - "dev": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "globby": { - "version": "11.1.0", - "resolved": 
"https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "requires": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - } - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, - "requires": { - "get-intrinsic": "^1.1.1" - } - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true - }, - "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dev": true, - "requires": { - "has-symbols": "^1.0.2" 
- } - }, - "hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "requires": { - "react-is": "^16.7.0" - } - }, - "ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", - "dev": true - }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "internal-slot": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", - "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", - "dev": true, - "requires": { - "get-intrinsic": "^1.1.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - } - }, - "invariant": { - "version": "2.2.4", - "resolved": 
"https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "requires": { - "loose-envify": "^1.0.0" - } - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" - }, - "is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dev": true, - "requires": { - "has-bigints": "^1.0.1" - } - }, - "is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "requires": { - "binary-extensions": "^2.0.0" - } - }, - "is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true - }, - "is-core-module": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", - "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", - "requires": { - "has": "^1.0.3" - } - }, - "is-date-object": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=" - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", - "dev": true - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", 
- "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dev": true, - "requires": { - "has-symbols": "^1.0.2" - } - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" - }, - "is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2" - } - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - 
} - }, - "json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true - }, - "json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "requires": { - "minimist": "^1.2.0" - } - }, - "jsx-ast-utils": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", - "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", - "dev": true, - "requires": { - "array-includes": "^3.1.5", - "object.assign": "^4.1.3" - } - }, - "language-subtag-registry": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==", - "dev": true - }, - "language-tags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", - "integrity": 
"sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", - "dev": true, - "requires": { - "language-subtag-registry": "~0.3.2" - } - }, - "levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - } - }, - "lilconfig": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.5.tgz", - "integrity": "sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg==" - }, - "lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" - }, - "lodash.castarray": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz", - "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==" - }, - "lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" - }, - "lodash.isplainobject": { - "version": "4.0.6", - "resolved": 
"https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - }, - "lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - }, - "lodash.samplesize": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.samplesize/-/lodash.samplesize-4.2.0.tgz", - "integrity": "sha1-Rgdi+7KzQikFF0mekNUVhttGX/k=" - }, - "lodash.throttle": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", - "integrity": "sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=" - }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" - }, - "micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "requires": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - } - }, - "mini-svg-data-uri": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", - "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==" - }, - "minimatch": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", - "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==" - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "next": { - "version": "15.2.4", - "resolved": "https://registry.npmjs.org/next/-/next-15.2.4.tgz", - "integrity": "sha512-VwL+LAaPSxEkd3lU2xWbgEOtrM8oedmyhBqaVNmgKB+GvZlCy9rgaEc+y2on0wv+l0oSFqLtYD6dcC1eAedUaQ==", - "requires": { - "@next/env": "15.2.4", - "@next/swc-darwin-arm64": "15.2.4", - "@next/swc-darwin-x64": "15.2.4", - "@next/swc-linux-arm64-gnu": "15.2.4", - "@next/swc-linux-arm64-musl": "15.2.4", - "@next/swc-linux-x64-gnu": "15.2.4", - "@next/swc-linux-x64-musl": "15.2.4", - "@next/swc-win32-arm64-msvc": "15.2.4", - "@next/swc-win32-x64-msvc": "15.2.4", - "@swc/counter": "0.1.3", - "@swc/helpers": "0.5.15", - "busboy": "1.6.0", - "caniuse-lite": "^1.0.30001579", - "postcss": "8.4.31", - "sharp": "^0.33.5", - "styled-jsx": "5.1.6" - }, - "dependencies": { - "postcss": { - "version": "8.4.31", - "resolved": 
"https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", - "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", - "requires": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - } - } - } - }, - "next-tick": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", - "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==" - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "node-gyp-build": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.5.0.tgz", - "integrity": "sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg==" - }, - "node-releases": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.2.tgz", - "integrity": "sha512-XxYDdcQ6eKqp/YjI+tb2C5WM2LgjnZrfYg4vgQt49EK268b6gYCHsBLrK2qvJo4FmCtqmKezb0WZFK4fkrZNsg==" - }, - "normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" - }, - "normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=" - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" - }, - "object-hash": { - 
"version": "2.2.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", - "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==" - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - "dev": true - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - } - }, - "object.entries": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz", - "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "object.hasown": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", - "dev": true, - "requires": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dev": true, - "requires": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - } - }, - "parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "requires": { - "callsites": "^3.0.0" - } - }, - "parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "requires": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - } - }, - "path-is-absolute": { - "version": 
"1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" - }, - "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" - }, - "postcss": { - "version": "8.4.32", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.32.tgz", - "integrity": "sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==", - "requires": { - "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - } - }, - "postcss-js": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.0.tgz", - "integrity": "sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ==", - "requires": { - "camelcase-css": "^2.0.1" - } - }, - "postcss-nested": { - "version": 
"5.0.6", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-5.0.6.tgz", - "integrity": "sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA==", - "requires": { - "postcss-selector-parser": "^6.0.6" - } - }, - "postcss-selector-parser": { - "version": "6.0.10", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", - "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", - "requires": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - } - }, - "postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" - }, - "prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true - }, - "prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" - }, - "quick-lru": { - 
"version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==" - }, - "react": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", - "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "react-dom": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", - "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "scheduler": "^0.20.2" - } - }, - "react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==" - }, - "react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "react-remove-scroll": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", - "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==", - "requires": { - "react-remove-scroll-bar": "^2.3.3", - "react-style-singleton": "^2.2.1", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.0", - "use-sidecar": "^1.1.2" - } - }, - "react-remove-scroll-bar": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz", - "integrity": 
"sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A==", - "requires": { - "react-style-singleton": "^2.2.1", - "tslib": "^2.0.0" - } - }, - "react-style-singleton": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", - "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", - "requires": { - "get-nonce": "^1.0.0", - "invariant": "^2.2.4", - "tslib": "^2.0.0" - } - }, - "readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "requires": { - "picomatch": "^2.2.1" - } - }, - "regenerator-runtime": { - "version": "0.13.11", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", - "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", - "dev": true - }, - "regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - } - }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true - }, - "resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", - "requires": { - 
"is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - } - }, - "scheduler": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", - "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "devOptional": true - }, - "sharp": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", - "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", - "optional": true, - "requires": { - "@img/sharp-darwin-arm64": "0.33.5", - "@img/sharp-darwin-x64": "0.33.5", - "@img/sharp-libvips-darwin-arm64": "1.0.4", - "@img/sharp-libvips-darwin-x64": "1.0.4", - "@img/sharp-libvips-linux-arm": "1.0.5", - "@img/sharp-libvips-linux-arm64": "1.0.4", - "@img/sharp-libvips-linux-s390x": "1.0.4", - "@img/sharp-libvips-linux-x64": "1.0.4", - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", - "@img/sharp-libvips-linuxmusl-x64": "1.0.4", - "@img/sharp-linux-arm": "0.33.5", - "@img/sharp-linux-arm64": "0.33.5", - "@img/sharp-linux-s390x": "0.33.5", - "@img/sharp-linux-x64": "0.33.5", - "@img/sharp-linuxmusl-arm64": "0.33.5", - "@img/sharp-linuxmusl-x64": "0.33.5", - "@img/sharp-wasm32": "0.33.5", - "@img/sharp-win32-ia32": "0.33.5", - "@img/sharp-win32-x64": "0.33.5", - "color": "^4.2.3", - "detect-libc": "^2.0.3", - "semver": "^7.6.3" - } - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": 
"sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - } - }, - "simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", - "optional": true, - "requires": { - "is-arrayish": "^0.3.1" - }, - "dependencies": { - "is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "optional": true - } - } - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" - }, - "streamsearch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", - "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==" - }, - "string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "has-symbols": "^1.0.3", - 
"internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" - } - }, - "string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "dev": true - }, - "strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true - }, - "styled-jsx": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", - "integrity": "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==", - "requires": { - "client-only": 
"0.0.1" - } - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - }, - "supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" - }, - "tailwindcss": { - "version": "3.0.23", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.0.23.tgz", - "integrity": "sha512-+OZOV9ubyQ6oI2BXEhzw4HrqvgcARY38xv3zKcjnWtMIZstEsXdI9xftd1iB7+RbOnj2HOEzkA0OyB5BaSxPQA==", - "requires": { - "arg": "^5.0.1", - "chalk": "^4.1.2", - "chokidar": "^3.5.3", - "color-name": "^1.1.4", - "cosmiconfig": "^7.0.1", - "detective": "^5.2.0", - "didyoumean": "^1.2.2", - "dlv": "^1.1.3", - "fast-glob": "^3.2.11", - "glob-parent": "^6.0.2", - "is-glob": "^4.0.3", - "normalize-path": "^3.0.0", - "object-hash": "^2.2.0", - "postcss": "^8.4.6", - "postcss-js": "^4.0.0", - "postcss-load-config": "^3.1.0", - "postcss-nested": "5.0.6", - "postcss-selector-parser": "^6.0.9", - "postcss-value-parser": "^4.2.0", - "quick-lru": "^5.1.1", - "resolve": "^1.22.0" - }, - "dependencies": { - "postcss-load-config": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.3.tgz", - "integrity": "sha512-5EYgaM9auHGtO//ljHH+v/aC/TQ5LHXtL7bQajNAUBKUVKiYE8rYpFms7+V26D9FncaGe2zwCoPQsFKb5zF/Hw==", - "requires": { - "lilconfig": "^2.0.4", - "yaml": "^1.10.2" - } - } - } - }, - "tailwindcss-radix": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/tailwindcss-radix/-/tailwindcss-radix-1.6.0.tgz", - "integrity": 
"sha512-5oBgGCVGsITMiUVlc6Euj4kt03l8htLJxVT9AXbkFxcJiXLtQxJriFq/8R+3s63OKit/ynCVdkqvlnW6H7iG1g==" - }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true - }, - "tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", - "dev": true, - "requires": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" - }, - "tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "requires": { - "tslib": "^1.8.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - 
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - } - } - }, - "type": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", - "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" - }, - "type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true - }, - "typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "requires": { - "is-typedarray": "^1.0.0" - } - }, - "typescript": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.2.tgz", - "integrity": "sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg==", - "dev": true - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - }, - "uri-js": { - "version": "4.4.1", - "resolved": 
"https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "use-callback-ref": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.0.tgz", - "integrity": "sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==", - "requires": { - "tslib": "^2.0.0" - } - }, - "use-sidecar": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", - "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", - "requires": { - "detect-node-es": "^1.1.0", - "tslib": "^2.0.0" - } - }, - "utf-8-validate": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.10.tgz", - "integrity": "sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ==", - "requires": { - "node-gyp-build": "^4.3.0" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "websocket": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/websocket/-/websocket-1.0.34.tgz", - "integrity": 
"sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ==", - "requires": { - "bufferutil": "^4.0.1", - "debug": "^2.2.0", - "es5-ext": "^0.10.50", - "typedarray-to-buffer": "^3.1.5", - "utf-8-validate": "^5.0.2", - "yaeti": "^0.0.6" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - } - } - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dev": true, - "requires": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - } - }, - "word-wrap": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", - "integrity": 
"sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", - "dev": true - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" - }, - "yaeti": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/yaeti/-/yaeti-0.0.6.tgz", - "integrity": "sha512-MvQa//+KcZCUkBTIC9blM+CU9J2GzuTytsOUwf2lidtvkx/6gnEp1QvJv34t9vdjhFmha/mUiNDbN0D0mJWdug==" - }, - "yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" - } - } -} diff --git a/demo/package.json b/demo/package.json deleted file mode 100644 index 00dcfcf0b..000000000 --- a/demo/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "demo", - "version": "0.1.2", - "private": true, - "scripts": { - "dev": "next dev", - "build": "next build", - "start": "next start", - "lint": "next lint" - }, - "dependencies": { - "@supabase/supabase-js": "^2.1.0", - "@supabase/ui": "0.37.0-alpha.81", - "lodash.clonedeep": "^4.5.0", - "lodash.samplesize": "^4.2.0", - "lodash.throttle": "^4.1.1", - "next": "^15.2.4", - "react": "17.0.2", - "react-dom": "17.0.2" - }, - "devDependencies": { - "@types/lodash.clonedeep": "^4.5.6", - "@types/lodash.samplesize": "^4.2.6", - "@types/lodash.throttle": "^4.1.6", - "@types/node": "17.0.21", - "@types/react": "17.0.41", - "autoprefixer": "^10.4.4", - "eslint": "8.11.0", - "eslint-config-next": "^12.3.4", - "postcss": "^8.4.31", - "tailwindcss": "^3.0.23", - "typescript": "4.6.2" - } -} diff --git a/demo/pages/[...slug].tsx b/demo/pages/[...slug].tsx deleted 
file mode 100644 index 07c5be211..000000000 --- a/demo/pages/[...slug].tsx +++ /dev/null @@ -1,578 +0,0 @@ -import { useEffect, useState, useRef, ReactElement } from 'react' -import type { NextPage } from 'next' -import { useRouter } from 'next/router' -import { nanoid } from 'nanoid' -import cloneDeep from 'lodash.clonedeep' -import throttle from 'lodash.throttle' -import { Badge } from '@supabase/ui' -import { - PostgrestResponse, - REALTIME_LISTEN_TYPES, - REALTIME_POSTGRES_CHANGES_LISTEN_EVENT, - REALTIME_PRESENCE_LISTEN_EVENTS, - REALTIME_SUBSCRIBE_STATES, - RealtimeChannel, - RealtimeChannelSendResponse, - RealtimePostgresInsertPayload, -} from '@supabase/supabase-js' - -import supabaseClient from '../client' -import { Coordinates, Message, Payload, User } from '../types' -import { removeFirst } from '../utils' -import { getRandomColor, getRandomColors, getRandomUniqueColor } from '../lib/RandomColor' -import { sendLog } from '../lib/sendLog' - -import Chatbox from '../components/Chatbox' -import Cursor from '../components/Cursor' -import Loader from '../components/Loader' -import Users from '../components/Users' -import WaitlistPopover from '../components/WaitlistPopover' -import DarkModeToggle from '../components/DarkModeToggle' - -const LATENCY_THRESHOLD = 400 -const MAX_ROOM_USERS = 50 -const MAX_DISPLAY_MESSAGES = 50 -const MAX_EVENTS_PER_SECOND = 10 -const X_THRESHOLD = 25 -const Y_THRESHOLD = 35 - -// Generate a random user id -const userId = nanoid() - -const Room: NextPage = () => { - const router = useRouter() - - const localColorBackup = getRandomColor() - - const chatboxRef = useRef() - // [Joshen] Super hacky fix for a really weird bug for onKeyDown - // input field. 
For some reason the first keydown event appends the character twice - const chatInputFix = useRef(true) - - // These states will be managed via ref as they're mutated within event listeners - const usersRef = useRef<{ [key: string]: User }>({}) - const isTypingRef = useRef(false) - const isCancelledRef = useRef(false) - const messageRef = useRef() - const messagesInTransitRef = useRef() - const mousePositionRef = useRef() - - const joinTimestampRef = useRef() - const insertMsgTimestampRef = useRef() - - // We manage the refs with a state so that the UI can re-render - const [isTyping, _setIsTyping] = useState(false) - const [isCancelled, _setIsCancelled] = useState(false) - const [message, _setMessage] = useState('') - const [messagesInTransit, _setMessagesInTransit] = useState([]) - const [mousePosition, _setMousePosition] = useState() - - const [areMessagesFetched, setAreMessagesFetched] = useState(false) - const [isInitialStateSynced, setIsInitialStateSynced] = useState(false) - const [latency, setLatency] = useState(0) - const [messages, setMessages] = useState([]) - const [roomId, setRoomId] = useState(undefined) - const [users, setUsers] = useState<{ [key: string]: User }>({}) - - const setIsTyping = (value: boolean) => { - isTypingRef.current = value - _setIsTyping(value) - } - - const setIsCancelled = (value: boolean) => { - isCancelledRef.current = value - _setIsCancelled(value) - } - - const setMessage = (value: string) => { - messageRef.current = value - _setMessage(value) - } - - const setMousePosition = (coordinates: Coordinates) => { - mousePositionRef.current = coordinates - _setMousePosition(coordinates) - } - - const setMessagesInTransit = (messages: string[]) => { - messagesInTransitRef.current = messages - _setMessagesInTransit(messages) - } - - const mapInitialUsers = (userChannel: RealtimeChannel, roomId: string) => { - const state = userChannel.presenceState() - const _users = state[roomId] - - if (!_users) return - - // Deconflict duplicate 
colours at the beginning of the browser session - const colors = Object.keys(usersRef.current).length === 0 ? getRandomColors(_users.length) : [] - - if (_users) { - setUsers((existingUsers) => { - const updatedUsers = _users.reduce( - (acc: { [key: string]: User }, { user_id: userId }: any, index: number) => { - const userColors = Object.values(usersRef.current).map((user: any) => user.color) - // Deconflict duplicate colors for incoming clients during the browser session - const color = colors.length > 0 ? colors[index] : getRandomUniqueColor(userColors) - - acc[userId] = existingUsers[userId] || { - x: 0, - y: 0, - color: color.bg, - hue: color.hue, - } - return acc - }, - {} - ) - usersRef.current = updatedUsers - return updatedUsers - }) - } - } - - useEffect(() => { - let roomChannel: RealtimeChannel - - const { slug } = router.query - const slugRoomId = Array.isArray(slug) ? slug[0] : undefined - - if (!roomId) { - // roomId is undefined when user first attempts to join a room - - joinTimestampRef.current = performance.now() - - /* - Client is joining 'rooms' channel to examine existing rooms and their users - and then the channel is removed once a room is selected - */ - roomChannel = supabaseClient.channel('rooms') - - roomChannel - .on(REALTIME_LISTEN_TYPES.PRESENCE, { event: REALTIME_PRESENCE_LISTEN_EVENTS.SYNC }, () => { - let newRoomId - const state = roomChannel.presenceState() - - // User attempting to navigate directly to an existing room with users - if (slugRoomId && slugRoomId in state && state[slugRoomId].length < MAX_ROOM_USERS) { - newRoomId = slugRoomId - } - - // User will be assigned an existing room with the fewest users - if (!newRoomId) { - const [mostVacantRoomId, users] = - Object.entries(state).sort(([, a], [, b]) => a.length - b.length)[0] ?? [] - - if (users && users.length < MAX_ROOM_USERS) { - newRoomId = mostVacantRoomId - } - } - - // Generate an id if no existing rooms are available - setRoomId(newRoomId ?? 
nanoid()) - }) - .subscribe() - } else { - // When user has been placed in a room - - joinTimestampRef.current && - sendLog( - `User ${userId} joined Room ${roomId} in ${( - performance.now() - joinTimestampRef.current - ).toFixed(1)} ms` - ) - - /* - Client is re-joining 'rooms' channel and the user's id will be tracked with Presence. - - Note: Realtime enforces unique channel names per client so the previous 'rooms' channel - has already been removed in the cleanup function. - */ - roomChannel = supabaseClient.channel('rooms', { config: { presence: { key: roomId } } }) - roomChannel.on( - REALTIME_LISTEN_TYPES.PRESENCE, - { event: REALTIME_PRESENCE_LISTEN_EVENTS.SYNC }, - () => { - setIsInitialStateSynced(true) - mapInitialUsers(roomChannel, roomId) - } - ) - roomChannel.subscribe(async (status: `${REALTIME_SUBSCRIBE_STATES}`) => { - if (status === REALTIME_SUBSCRIBE_STATES.SUBSCRIBED) { - const resp: RealtimeChannelSendResponse = await roomChannel.track({ user_id: userId }) - - if (resp === 'ok') { - router.push(`/${roomId}`) - } else { - router.push(`/`) - } - } - }) - - // Get the room's existing messages that were saved to database - supabaseClient - .from('messages') - .select('id, user_id, message') - .filter('room_id', 'eq', roomId) - .order('created_at', { ascending: false }) - .limit(MAX_DISPLAY_MESSAGES) - .then((resp: PostgrestResponse) => { - resp.data && setMessages(resp.data.reverse()) - setAreMessagesFetched(true) - if (chatboxRef.current) chatboxRef.current.scrollIntoView({ behavior: 'smooth' }) - }) - } - - // Must properly remove subscribed channel - return () => { - roomChannel && supabaseClient.removeChannel(roomChannel) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [roomId]) - - useEffect(() => { - if (!roomId || !isInitialStateSynced) return - - let pingIntervalId: ReturnType | undefined - let messageChannel: RealtimeChannel, pingChannel: RealtimeChannel - let setMouseEvent: (e: MouseEvent) => void = () => {}, - 
onKeyDown: (e: KeyboardEvent) => void = () => {} - - // Ping channel is used to calculate roundtrip time from client to server to client - pingChannel = supabaseClient.channel(`ping:${userId}`, { - config: { broadcast: { ack: true } }, - }) - pingChannel.subscribe((status: `${REALTIME_SUBSCRIBE_STATES}`) => { - if (status === REALTIME_SUBSCRIBE_STATES.SUBSCRIBED) { - pingIntervalId = setInterval(async () => { - const start = performance.now() - const resp = await pingChannel.send({ - type: 'broadcast', - event: 'PING', - payload: {}, - }) - - if (resp !== 'ok') { - console.log('pingChannel broadcast error') - setLatency(-1) - } else { - const end = performance.now() - const newLatency = end - start - - if (newLatency >= LATENCY_THRESHOLD) { - sendLog( - `Roundtrip Latency for User ${userId} surpassed ${LATENCY_THRESHOLD} ms at ${newLatency.toFixed( - 1 - )} ms` - ) - } - - setLatency(newLatency) - } - }, 1000) - } - }) - - messageChannel = supabaseClient.channel(`chat_messages:${roomId}`) - - // Listen for messages inserted into the database - messageChannel.on( - REALTIME_LISTEN_TYPES.POSTGRES_CHANGES, - { - event: REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.INSERT, - schema: 'public', - table: 'messages', - filter: `room_id=eq.${roomId}`, - }, - ( - payload: RealtimePostgresInsertPayload<{ - id: number - created_at: string - message: string - user_id: string - room_id: string - }> - ) => { - if (payload.new.user_id === userId && insertMsgTimestampRef.current) { - sendLog( - `Message Latency for User ${userId} from insert to receive was ${( - performance.now() - insertMsgTimestampRef.current - ).toFixed(1)} ms` - ) - insertMsgTimestampRef.current = undefined - } - - setMessages((prevMsgs: Message[]) => { - const messages = prevMsgs.slice(-MAX_DISPLAY_MESSAGES + 1) - const msg = (({ id, message, room_id, user_id }) => ({ - id, - message, - room_id, - user_id, - }))(payload.new) - messages.push(msg) - - if (msg.user_id === userId) { - const updatedMessagesInTransit = 
removeFirst( - messagesInTransitRef?.current ?? [], - msg.message - ) - setMessagesInTransit(updatedMessagesInTransit) - } - - return messages - }) - - if (chatboxRef.current) { - chatboxRef.current.scrollIntoView({ behavior: 'smooth' }) - } - } - ) - - // Listen for cursor positions from other users in the room - messageChannel.on( - REALTIME_LISTEN_TYPES.BROADCAST, - { event: 'POS' }, - (payload: Payload<{ user_id: string } & Coordinates>) => { - setUsers((users) => { - const userId = payload!.payload!.user_id - const existingUser = users[userId] - - if (existingUser) { - const x = - (payload?.payload?.x ?? 0) - X_THRESHOLD > window.innerWidth - ? window.innerWidth - X_THRESHOLD - : payload?.payload?.x - const y = - (payload?.payload?.y ?? 0 - Y_THRESHOLD) > window.innerHeight - ? window.innerHeight - Y_THRESHOLD - : payload?.payload?.y - - users[userId] = { ...existingUser, ...{ x, y } } - users = cloneDeep(users) - } - - return users - }) - } - ) - - // Listen for messages sent by other users directly via Broadcast - messageChannel.on( - REALTIME_LISTEN_TYPES.BROADCAST, - { event: 'MESSAGE' }, - (payload: Payload<{ user_id: string; isTyping: boolean; message: string }>) => { - setUsers((users) => { - const userId = payload!.payload!.user_id - const existingUser = users[userId] - - if (existingUser) { - users[userId] = { - ...existingUser, - ...{ isTyping: payload?.payload?.isTyping, message: payload?.payload?.message }, - } - users = cloneDeep(users) - } - - return users - }) - } - ) - messageChannel.subscribe((status: `${REALTIME_SUBSCRIBE_STATES}`) => { - if (status === REALTIME_SUBSCRIBE_STATES.SUBSCRIBED) { - // Lodash throttle will be removed once realtime-js client throttles on the channel level - const sendMouseBroadcast = throttle(({ x, y }) => { - messageChannel - .send({ - type: 'broadcast', - event: 'POS', - payload: { user_id: userId, x, y }, - }) - .catch(() => {}) - }, 1000 / MAX_EVENTS_PER_SECOND) - - setMouseEvent = (e: MouseEvent) => { - const 
[x, y] = [e.clientX, e.clientY] - sendMouseBroadcast({ x, y }) - setMousePosition({ x, y }) - } - - onKeyDown = async (e: KeyboardEvent) => { - if (document.activeElement?.id === 'email') return - - // Start typing session - if (e.code === 'Enter' || (e.key.length === 1 && !e.metaKey)) { - if (!isTypingRef.current) { - setIsTyping(true) - setIsCancelled(false) - - if (chatInputFix.current) { - setMessage('') - chatInputFix.current = false - } else { - setMessage(e.key.length === 1 ? e.key : '') - } - messageChannel - .send({ - type: 'broadcast', - event: 'MESSAGE', - payload: { user_id: userId, isTyping: true, message: '' }, - }) - .catch(() => {}) - } else if (e.code === 'Enter') { - // End typing session and send message - setIsTyping(false) - messageChannel - .send({ - type: 'broadcast', - event: 'MESSAGE', - payload: { user_id: userId, isTyping: false, message: messageRef.current }, - }) - .catch(() => {}) - if (messageRef.current) { - const updatedMessagesInTransit = (messagesInTransitRef?.current ?? 
[]).concat([ - messageRef.current, - ]) - setMessagesInTransit(updatedMessagesInTransit) - if (chatboxRef.current) chatboxRef.current.scrollIntoView({ behavior: 'smooth' }) - insertMsgTimestampRef.current = performance.now() - await supabaseClient.from('messages').insert([ - { - user_id: userId, - room_id: roomId, - message: messageRef.current, - }, - ]) - } - } - } - - // End typing session without sending - if (e.code === 'Escape' && isTypingRef.current) { - setIsTyping(false) - setIsCancelled(true) - chatInputFix.current = true - - messageChannel - .send({ - type: 'broadcast', - event: 'MESSAGE', - payload: { user_id: userId, isTyping: false, message: '' }, - }) - .catch(() => {}) - } - } - - window.addEventListener('mousemove', setMouseEvent) - window.addEventListener('keydown', onKeyDown) - } - }) - - return () => { - pingIntervalId && clearInterval(pingIntervalId) - - window.removeEventListener('mousemove', setMouseEvent) - window.removeEventListener('keydown', onKeyDown) - - pingChannel && supabaseClient.removeChannel(pingChannel) - messageChannel && supabaseClient.removeChannel(messageChannel) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [roomId, isInitialStateSynced]) - - if (!roomId) { - return - } - - return ( -
-
-
-
- - -
-
-
- - Latency: {latency.toFixed(1)}ms -
-
- -
-
-
- -
-
-

Chat

- - ↩ - -
-
-

Escape

- - ESC - -
-
- - {Object.entries(users).reduce((acc, [userId, data]) => { - const { x, y, color, message, isTyping, hue } = data - if (x && y) { - acc.push( - - ) - } - return acc - }, [] as ReactElement[])} - - {/* Cursor for local client: Shouldn't show the cursor itself, only the text bubble */} - {Number.isInteger(mousePosition?.x) && Number.isInteger(mousePosition?.y) && ( - - )} -
- ) -} - -export default Room diff --git a/demo/pages/_app.tsx b/demo/pages/_app.tsx deleted file mode 100644 index c55f6089f..000000000 --- a/demo/pages/_app.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import '../styles/globals.css' -import type { AppProps } from 'next/app' -import Head from 'next/head' -import { ThemeProvider } from '../lib/ThemeProvider' - -function MyApp({ Component, pageProps }: AppProps) { - return ( - <> - - Realtime | Supabase - - - - - - - - - - - - - ) -} - -export default MyApp diff --git a/demo/pages/_document.tsx b/demo/pages/_document.tsx deleted file mode 100644 index 9e20a3fd2..000000000 --- a/demo/pages/_document.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import Document, { DocumentContext, Head, Html, Main, NextScript } from 'next/document' - -class MyDocument extends Document { - static async getInitialProps(ctx: DocumentContext) { - const initialProps = await Document.getInitialProps(ctx) - return initialProps - } - - render() { - return ( - - - - - -
- - - - ) - } -} - -export default MyDocument diff --git a/demo/pages/api/log.ts b/demo/pages/api/log.ts deleted file mode 100644 index a8a26317c..000000000 --- a/demo/pages/api/log.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { NextApiRequest, NextApiResponse } from 'next' - -const LOGFLARE_API_KEY = process.env.LOGFLARE_API_KEY || '' -const LOGFLARE_SOURCE_ID = process.env.LOGFLARE_SOURCE_ID || '' - -const recordLogs = async (req: NextApiRequest, res: NextApiResponse) => { - if (!LOGFLARE_API_KEY || !LOGFLARE_SOURCE_ID) { - return res.status(400).json('Logs are not being recorded') - } - if (req.method !== 'POST') { - return res.status(400).json('Only POST methods are supported') - } - - const body = await req.body - - try { - await fetch(`https://api.logflare.app/api/logs?source=${LOGFLARE_SOURCE_ID}`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'X-API-KEY': `${LOGFLARE_API_KEY}`, - }, - body: JSON.stringify(body), - }) - res.json('ok') - } catch (e) { - console.error(JSON.stringify(e)) - } -} - -export default recordLogs diff --git a/demo/postcss.config.js b/demo/postcss.config.js deleted file mode 100644 index 33ad091d2..000000000 --- a/demo/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -} diff --git a/demo/public/css/fonts.css b/demo/public/css/fonts.css deleted file mode 100644 index 47a1664ed..000000000 --- a/demo/public/css/fonts.css +++ /dev/null @@ -1,72 +0,0 @@ -/* header and body font */ - -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-Book.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-Book.woff) format('woff'); - font-weight: 400; - font-style: normal; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-BookItalic.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-BookItalic.woff) format('woff'); - font-weight: 400; - font-style: italic; -} -@font-face { - 
font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-Medium.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-Medium.woff) format('woff'); - font-weight: 500; - font-style: normal; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-MediumItalic.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-MediumItalic.woff) format('woff'); - font-weight: 500; - font-style: italic; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-Bold.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-Bold.woff) format('woff'); - font-weight: 700; - font-style: 600; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-BoldItalic.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-BoldItalic.woff) format('woff'); - font-style: 600; - font-style: italic; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-Black.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-Black.woff) format('woff'); - font-weight: 800; - font-style: normal; -} -@font-face { - font-family: 'circular'; - src: url(/fonts/custom-font/CustomFont-BlackItalic.woff2) format('woff2'), - url(/fonts/custom-font/CustomFont-BlackItalic.woff) format('woff'); - font-weight: 800; - font-style: italic; -} - -/* mono font */ - -@font-face { - font-family: 'source code pro'; - src: url('/fonts/source-code-pro/SourceCodePro-Regular.eot'); - src: url('/fonts/source-code-pro/SourceCodePro-Regular.woff2') format('woff2'), - url('/fonts/source-code-pro/SourceCodePro-Regular.woff') format('woff'), - url('/fonts/source-code-pro/SourceCodePro-Regular.ttf') format('truetype'), - url('/fonts/source-code-pro/SourceCodePro-Regular.svg#SourceCodePro-Regular') format('svg'); - font-weight: normal; - font-style: normal; - font-display: swap; -} diff --git a/demo/public/favicon.ico b/demo/public/favicon.ico deleted file mode 100644 index 718d6fea4..000000000 Binary 
files a/demo/public/favicon.ico and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Black.woff b/demo/public/fonts/custom-font/CustomFont-Black.woff deleted file mode 100644 index 091f927ea..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Black.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Black.woff2 b/demo/public/fonts/custom-font/CustomFont-Black.woff2 deleted file mode 100644 index e3c834e57..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Black.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff b/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff deleted file mode 100644 index b5f6a877d..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff2 b/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff2 deleted file mode 100644 index f84036283..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BlackItalic.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Bold.woff b/demo/public/fonts/custom-font/CustomFont-Bold.woff deleted file mode 100644 index f8d3f551d..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Bold.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Bold.woff2 b/demo/public/fonts/custom-font/CustomFont-Bold.woff2 deleted file mode 100644 index 5e7af4594..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Bold.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff b/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff deleted file mode 100644 index 07e8a3507..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff2 
b/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff2 deleted file mode 100644 index ac0edd55f..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BoldItalic.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Book.woff b/demo/public/fonts/custom-font/CustomFont-Book.woff deleted file mode 100644 index 7d53d032f..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Book.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Book.woff2 b/demo/public/fonts/custom-font/CustomFont-Book.woff2 deleted file mode 100644 index abd31f7ec..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Book.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BookItalic.woff b/demo/public/fonts/custom-font/CustomFont-BookItalic.woff deleted file mode 100644 index 427cda875..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BookItalic.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-BookItalic.woff2 b/demo/public/fonts/custom-font/CustomFont-BookItalic.woff2 deleted file mode 100644 index d326c8672..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-BookItalic.woff2 and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Medium.woff b/demo/public/fonts/custom-font/CustomFont-Medium.woff deleted file mode 100644 index 3707cb45d..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Medium.woff and /dev/null differ diff --git a/demo/public/fonts/custom-font/CustomFont-Medium.woff2 b/demo/public/fonts/custom-font/CustomFont-Medium.woff2 deleted file mode 100644 index c07131dde..000000000 Binary files a/demo/public/fonts/custom-font/CustomFont-Medium.woff2 and /dev/null differ diff --git a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.eot b/demo/public/fonts/source-code-pro/SourceCodePro-Regular.eot deleted file mode 100644 index e815e2cc6..000000000 Binary files 
a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.eot and /dev/null differ diff --git a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.svg b/demo/public/fonts/source-code-pro/SourceCodePro-Regular.svg deleted file mode 100644 index 45766eb08..000000000 --- a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.svg +++ /dev/null @@ -1,4016 +0,0 @@ - - - - -Created by FontForge 20170731 at Thu Jun 9 00:20:48 2016 - By Aleksey,,, -Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), with Reserved Font Name `Source'. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.ttf b/demo/public/fonts/source-code-pro/SourceCodePro-Regular.ttf deleted file mode 100644 index 2d08f66a4..000000000 Binary files a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.ttf and /dev/null differ diff 
--git a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff b/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff deleted file mode 100644 index eacf83e53..000000000 Binary files a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff and /dev/null differ diff --git a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff2 b/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff2 deleted file mode 100644 index 21411ef40..000000000 Binary files a/demo/public/fonts/source-code-pro/SourceCodePro-Regular.woff2 and /dev/null differ diff --git a/demo/public/img/multiplayer-og.png b/demo/public/img/multiplayer-og.png deleted file mode 100644 index fe97b1cbb..000000000 Binary files a/demo/public/img/multiplayer-og.png and /dev/null differ diff --git a/demo/public/img/supabase-dark.svg b/demo/public/img/supabase-dark.svg deleted file mode 100644 index d1f685ac7..000000000 --- a/demo/public/img/supabase-dark.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/demo/public/img/supabase-light.svg b/demo/public/img/supabase-light.svg deleted file mode 100644 index 60cbc71dc..000000000 --- a/demo/public/img/supabase-light.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/demo/public/vercel.svg b/demo/public/vercel.svg deleted file mode 100644 index fbf0e25a6..000000000 --- a/demo/public/vercel.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - \ No newline at end of file diff --git a/demo/styles/globals.css b/demo/styles/globals.css deleted file mode 100644 index 5cf9a0fb6..000000000 --- a/demo/styles/globals.css +++ /dev/null @@ -1,95 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; - -@layer utilities { - .btn-primary { - @apply inline-block text-sm border border-green-500 rounded py-1 px-3 bg-green-500; - color: #fff !important; - font-weight: 600; - line-height: 20px; - text-align: center; - } - - .btn-primary-hover { - @apply bg-green-600; - 
cursor: pointer; - } -} - -html, -body, -#__next, -.main { - height: 100vh; - padding: 0; - margin: 0; - -moz-osx-font-smoothing: grayscale; - -webkit-font-smoothing: antialiased; - @apply bg-scale-100 dark:bg-scale-100; -} - -* { - box-sizing: border-box; -} - -a { - color: inherit; - text-decoration: none; -} - -/* Loader dots */ - -.loader-dots div { - animation-timing-function: cubic-bezier(0, 1, 1, 0); -} - -.loader-dots div:nth-child(1) { - left: 4px; - animation: loader-dots1 0.6s infinite; -} - -.loader-dots div:nth-child(2) { - left: 4px; - animation: loader-dots2 0.6s infinite; -} - -.loader-dots div:nth-child(3) { - left: 16px; - animation: loader-dots2 0.6s infinite; -} - -.loader-dots div:nth-child(4) { - left: 28px; - animation: loader-dots3 0.6s infinite; -} - -@keyframes loader-dots1 { - 0% { - transform: scale(0); - } - - 100% { - transform: scale(1); - } -} - -@keyframes loader-dots3 { - 0% { - transform: scale(1); - } - - 100% { - transform: scale(0); - } -} - -@keyframes loader-dots2 { - 0% { - transform: translate(0, 0); - } - - 100% { - transform: translate(12px, 0); - } -} diff --git a/demo/tailwind.config.js b/demo/tailwind.config.js deleted file mode 100644 index 6204c4a98..000000000 --- a/demo/tailwind.config.js +++ /dev/null @@ -1,147 +0,0 @@ -const ui = require('@supabase/ui/dist/config/ui.config.js') - -const blueGray = { - 50: '#F8FAFC', - 100: '#F1F5F9', - 200: '#E2E8F0', - 300: '#CBD5E1', - 400: '#94A3B8', - 500: '#64748B', - 600: '#475569', - 700: '#334155', - 800: '#1E293B', - 900: '#0F172A', -} - -const coolGray = { - 50: '#F9FAFB', - 100: '#F3F4F6', - 200: '#E5E7EB', - 300: '#D1D5DB', - 400: '#9CA3AF', - 500: '#6B7280', - 600: '#4B5563', - 700: '#374151', - 800: '#1F2937', - 900: '#111827', -} - -module.exports = ui({ - darkMode: 'class', // or 'media' or 'class' - content: [ - // purge styles from app - './pages/**/*.{js,ts,jsx,tsx}', - './components/**/*.{js,ts,jsx,tsx}', - './internals/**/*.{js,ts,jsx,tsx}', - 
'./lib/**/*.{js,ts,jsx,tsx}', - './lib/**/**/*.{js,ts,jsx,tsx}', - // purge styles from supabase ui theme - './node_modules/@supabase/ui/dist/config/default-theme.js', - ], - theme: { - fontFamily: { - sans: ['circular', 'Helvetica Neue', 'Helvetica', 'Arial', 'sans-serif'], - mono: ['source code pro', 'Menlo', 'monospace'], - }, - borderColor: (theme) => ({ - ...theme('colors'), - DEFAULT: 'var(--colors-scale5)', - dark: 'var(--colors-scale4)', - }), - divideColor: (theme) => ({ - ...theme('colors'), - DEFAULT: 'var(--colors-scale3)', - dark: 'var(--colors-scale2)', - }), - extend: { - typography: ({ theme }) => ({ - // Removal of backticks in code blocks for tailwind v3.0 - // https://github.com/tailwindlabs/tailwindcss-typography/issues/135 - DEFAULT: { - css: { - 'code::before': { - content: '""', - }, - 'code::after': { - content: '""', - }, - }, - }, - }), - colors: { - /* typography */ - 'typography-body': { - light: 'var(--colors-scale11)', - dark: 'var(--colors-scale11)', - }, - 'typography-body-secondary': { - light: 'var(--colors-scale10)', - dark: 'var(--colors-scale10)', - }, - 'typography-body-strong': { - light: 'var(--colors-scale12)', - dark: 'var(--colors-scale12)', - }, - 'typography-body-faded': { - light: 'var(--colors-scale9)', - dark: 'var(--colors-scale9)', - }, - - /* borders */ - 'border-secondary': { - light: 'var(--colors-scale7)', - dark: 'var(--colors-scale7)', - }, - 'border-secondary-hover': { - light: 'var(--colors-scale9)', - dark: 'var(--colors-scale9)', - }, - - /* app backgrounds */ - 'bg-primary': { - light: 'var(--colors-scale2)', - dark: 'var(--colors-scale2)', - }, - 'bg-secondary': { - light: 'var(--colors-scale2)', - dark: 'var(--colors-scale2)', - }, - 'bg-alt': { - light: 'var(--colors-scale2)', - dark: 'var(--colors-scale2)', - }, - }, - animation: { - gradient: 'gradient 60s ease infinite', - 'ping-once': 'ping-once 1s cubic-bezier(0, 0, 0.2, 1);', - }, - keyframes: { - gradient: { - '0%': { - 'background-position': 
'0% 50%', - }, - '50%': { - 'background-position': '100% 50%', - }, - '100%': { - 'background-position': '0% 50%', - }, - }, - 'ping-once': { - '75%': { - transform: 'scale(2)', - opacity: 0, - }, - '100%': { - transform: 'scale(2)', - opacity: 0, - }, - }, - }, - }, - }, - variants: { - extend: {}, - }, - plugins: [require('@tailwindcss/typography')], -}) diff --git a/demo/tsconfig.json b/demo/tsconfig.json deleted file mode 100644 index 99710e857..000000000 --- a/demo/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "node", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true - }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], - "exclude": ["node_modules"] -} diff --git a/demo/types.ts b/demo/types.ts deleted file mode 100644 index d992d7a4a..000000000 --- a/demo/types.ts +++ /dev/null @@ -1,23 +0,0 @@ -export interface Coordinates { - x: number | undefined - y: number | undefined -} - -export interface Message { - id: number - user_id: string - message: string -} - -export interface Payload { - type: string - event: string - payload?: T -} - -export interface User extends Coordinates { - color: string - hue: string - isTyping?: boolean - message?: string -} diff --git a/demo/utils.ts b/demo/utils.ts deleted file mode 100644 index 382d8586d..000000000 --- a/demo/utils.ts +++ /dev/null @@ -1,5 +0,0 @@ -export const removeFirst = (src: any[], element: any) => { - const index = src.indexOf(element) - if (index === -1) return src - return [...src.slice(0, index), ...src.slice(index + 1)] -} diff --git a/docker-compose.tests.yml b/docker-compose.tests.yml new file mode 100644 index 000000000..56f5466e8 --- /dev/null +++ 
b/docker-compose.tests.yml @@ -0,0 +1,83 @@ +services: + # Supabase Realtime service + test_db: + image: supabase/postgres:14.1.0.105 + container_name: test-realtime-db + ports: + - "5532:5432" + volumes: + - ./dev/postgres:/docker-entrypoint-initdb.d/ + command: postgres -c config_file=/etc/postgresql/postgresql.conf + environment: + POSTGRES_HOST: /var/run/postgresql + POSTGRES_PASSWORD: postgres + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + test_realtime: + depends_on: + - test_db + build: . + container_name: test-realtime-server + ports: + - "4100:4100" + extra_hosts: + - "host.docker.internal:host-gateway" + environment: + PORT: 4100 + DB_HOST: host.docker.internal + DB_PORT: 5532 + DB_USER: postgres + DB_PASSWORD: postgres + DB_NAME: postgres + DB_ENC_KEY: 1234567890123456 + DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime' + API_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long + SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq + ERL_AFLAGS: -proto_dist inet_tcp + RLIMIT_NOFILE: 1000000 + DNS_NODES: "''" + APP_NAME: realtime + RUN_JANITOR: true + JANITOR_INTERVAL: 60000 + LOG_LEVEL: "info" + SEED_SELF_HOST: true + networks: + test-network: + aliases: + - realtime-dev.local + - realtime-dev.localhost + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:4100/"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 5s + + # Deno test runner + test-runner: + image: denoland/deno:alpine-2.5.6 + container_name: deno-test-runner + depends_on: + test_realtime: + condition: service_healthy + test_db: + condition: service_healthy + volumes: + - ./test/integration/tests.ts:/app/tests.ts:ro + working_dir: /app + command: > + sh -c " + echo 'Running tests...' 
&& + deno test tests.ts --allow-import --no-check --allow-read --allow-net --trace-leaks --allow-env=WS_NO_BUFFER_UTIL + " + networks: + - test-network + extra_hosts: + - "realtime-dev.localhost:host-gateway" + +networks: + test-network: + driver: bridge diff --git a/lib/extensions/postgres_cdc_rls/cdc_rls.ex b/lib/extensions/postgres_cdc_rls/cdc_rls.ex index 57bf17352..6d9980a9c 100644 --- a/lib/extensions/postgres_cdc_rls/cdc_rls.ex +++ b/lib/extensions/postgres_cdc_rls/cdc_rls.ex @@ -6,11 +6,13 @@ defmodule Extensions.PostgresCdcRls do @behaviour Realtime.PostgresCdc use Realtime.Logs - alias RealtimeWeb.Endpoint alias Extensions.PostgresCdcRls, as: Rls + alias Realtime.GenCounter + alias Realtime.GenRpc + alias RealtimeWeb.Endpoint alias Rls.Subscriptions - alias Realtime.Rpc + @impl true @spec handle_connect(map()) :: {:ok, {pid(), pid()}} | nil def handle_connect(args) do case get_manager_conn(args["id"]) do @@ -26,22 +28,82 @@ defmodule Extensions.PostgresCdcRls do end end - def handle_after_connect({manager_pid, conn}, settings, params) do - publication = settings["publication"] - opts = [conn, publication, params, manager_pid, self()] - conn_node = node(conn) + @impl true + def handle_after_connect({manager_pid, conn}, settings, params_list, tenant) do + with {:ok, subscription_list} <- subscription_list(params_list) do + pool_size = Map.get(settings, "subcriber_pool_size", 4) + publication = settings["publication"] + create_subscription(conn, tenant, publication, pool_size, subscription_list, manager_pid, self()) + end + end + + @database_timeout_reason "Too many database timeouts" + + def create_subscription(conn, tenant, publication, pool_size, subscription_list, manager_pid, caller) + when node(conn) == node() do + rate_counter = rate_counter(tenant, pool_size) + + if rate_counter.limit.triggered == false do + case Subscriptions.create(conn, publication, subscription_list, manager_pid, caller) do + {:error, %DBConnection.ConnectionError{}} -> + 
GenCounter.add(rate_counter.id) + {:error, @database_timeout_reason} + + {:error, {:exit, _}} -> + GenCounter.add(rate_counter.id) + {:error, @database_timeout_reason} + + response -> + response + end + else + {:error, @database_timeout_reason} + end + end + + def create_subscription(conn, tenant, publication, pool_size, subscription_list, manager_pid, caller) do + rate_counter = rate_counter(tenant, pool_size) - if conn_node !== node() do - Rpc.call(conn_node, Subscriptions, :create, opts, timeout: 15_000) + if rate_counter.limit.triggered == false do + args = [conn, tenant, publication, pool_size, subscription_list, manager_pid, caller] + + case GenRpc.call(node(conn), __MODULE__, :create_subscription, args, timeout: 15_000, tenant_id: tenant) do + {:error, @database_timeout_reason} -> + GenCounter.add(rate_counter.id) + {:error, @database_timeout_reason} + + response -> + response + end else - apply(Subscriptions, :create, opts) + {:error, @database_timeout_reason} end end + defp rate_counter(tenant_id, pool_size) do + rate_counter_args = Realtime.Tenants.subscription_errors_per_second_rate(tenant_id, pool_size) + {:ok, rate_counter} = Realtime.RateCounter.get(rate_counter_args) + rate_counter + end + + defp subscription_list(params_list) do + Enum.reduce_while(params_list, {:ok, []}, fn params, {:ok, acc} -> + case Subscriptions.parse_subscription_params(params[:params]) do + {:ok, subscription_params} -> + {:cont, {:ok, [%{id: params.id, claims: params.claims, subscription_params: subscription_params} | acc]}} + + {:error, reason} -> + {:halt, {:error, {:malformed_subscription_params, reason}}} + end + end) + end + + @impl true def handle_subscribe(_, tenant, metadata) do Endpoint.subscribe("realtime:postgres:" <> tenant, metadata) end + @impl true @doc """ Stops the Supervision tree for a tenant. 
@@ -50,7 +112,9 @@ defmodule Extensions.PostgresCdcRls do @spec handle_stop(String.t(), non_neg_integer()) :: :ok def handle_stop(tenant, timeout) when is_binary(tenant) do - case :syn.whereis_name({__MODULE__, tenant}) do + scope = Realtime.Syn.PostgresCdc.scope(tenant) + + case :syn.whereis_name({scope, tenant}) do :undefined -> Logger.warning("Database supervisor not found for tenant #{tenant}") :ok @@ -70,7 +134,7 @@ defmodule Extensions.PostgresCdcRls do "Starting distributed postgres extension #{inspect(lauch_node: launch_node, region: region, platform_region: platform_region)}" ) - case Rpc.call(launch_node, __MODULE__, :start, [args], timeout: 30_000, tenant: tenant) do + case GenRpc.call(launch_node, __MODULE__, :start, [args], timeout: 30_000, tenant_id: tenant) do {:ok, _pid} = ok -> ok @@ -99,14 +163,16 @@ defmodule Extensions.PostgresCdcRls do %{ id: tenant, start: {Rls.WorkerSupervisor, :start_link, [args]}, - restart: :transient + restart: :temporary } ) end @spec get_manager_conn(String.t()) :: {:error, nil | :wait} | {:ok, pid(), pid()} def get_manager_conn(id) do - case :syn.lookup(__MODULE__, id) do + scope = Realtime.Syn.PostgresCdc.scope(id) + + case :syn.lookup(scope, id) do {_, %{manager: nil, subs_pool: nil}} -> {:error, :wait} {_, %{manager: manager, subs_pool: conn}} -> {:ok, manager, conn} _ -> {:error, nil} @@ -115,12 +181,15 @@ defmodule Extensions.PostgresCdcRls do @spec supervisor_id(String.t(), String.t()) :: {atom(), String.t(), map()} def supervisor_id(tenant, region) do - {__MODULE__, tenant, %{region: region, manager: nil, subs_pool: nil}} + scope = Realtime.Syn.PostgresCdc.scope(tenant) + {scope, tenant, %{region: region, manager: nil, subs_pool: nil}} end @spec update_meta(String.t(), pid(), pid()) :: {:ok, {pid(), term()}} | {:error, term()} def update_meta(tenant, manager_pid, subs_pool) do - :syn.update_registry(__MODULE__, tenant, fn pid, meta -> + scope = Realtime.Syn.PostgresCdc.scope(tenant) + + 
:syn.update_registry(scope, tenant, fn pid, meta -> if node(pid) == node(manager_pid) do %{meta | manager: manager_pid, subs_pool: subs_pool} else @@ -130,6 +199,4 @@ defmodule Extensions.PostgresCdcRls do end end) end - - def syn_topic(tenant_id), do: "cdc_rls:#{tenant_id}" end diff --git a/lib/extensions/postgres_cdc_rls/message_dispatcher.ex b/lib/extensions/postgres_cdc_rls/message_dispatcher.ex index 8e7ae7f5f..bbf9b69a5 100644 --- a/lib/extensions/postgres_cdc_rls/message_dispatcher.ex +++ b/lib/extensions/postgres_cdc_rls/message_dispatcher.ex @@ -7,23 +7,11 @@ defmodule Extensions.PostgresCdcRls.MessageDispatcher do """ alias Phoenix.Socket.Broadcast - alias Realtime.GenCounter - alias Realtime.RateCounter - alias Realtime.Tenants - - def dispatch([_ | _] = topic_subscriptions, _from, payload) do - {sub_ids, payload} = Map.pop(payload, :subscription_ids) - - [{_pid, {:subscriber_fastlane, _fastlane_pid, _serializer, _ids, _join_topic, tenant_id, _is_new_api}} | _] = - topic_subscriptions - - # Ensure RateCounter is started - rate = Tenants.db_events_per_second_rate(tenant_id) - RateCounter.new(rate) + def dispatch([_ | _] = topic_subscriptions, _from, {type, payload, sub_ids}) do _ = Enum.reduce(topic_subscriptions, %{}, fn - {_pid, {:subscriber_fastlane, fastlane_pid, serializer, ids, join_topic, _tenant, is_new_api}}, cache -> + {_pid, {:subscriber_fastlane, fastlane_pid, serializer, ids, join_topic, is_new_api}}, cache -> for {bin_id, id} <- ids, reduce: [] do acc -> if MapSet.member?(sub_ids, bin_id) do @@ -39,17 +27,12 @@ defmodule Extensions.PostgresCdcRls.MessageDispatcher do %Broadcast{ topic: join_topic, event: "postgres_changes", - payload: %{ids: valid_ids, data: payload} + payload: %{ids: valid_ids, data: Jason.Fragment.new(payload)} } else - %Broadcast{ - topic: join_topic, - event: payload.type, - payload: payload - } + %Broadcast{topic: join_topic, event: type, payload: Jason.Fragment.new(payload)} end - GenCounter.add(rate.id) 
broadcast_message(cache, fastlane_pid, new_payload, serializer) _ -> diff --git a/lib/extensions/postgres_cdc_rls/replication_poller.ex b/lib/extensions/postgres_cdc_rls/replication_poller.ex index 65f4a33f1..2cd5d90a5 100644 --- a/lib/extensions/postgres_cdc_rls/replication_poller.ex +++ b/lib/extensions/postgres_cdc_rls/replication_poller.ex @@ -18,14 +18,24 @@ defmodule Extensions.PostgresCdcRls.ReplicationPoller do alias Realtime.Adapters.Changes.NewRecord alias Realtime.Adapters.Changes.UpdatedRecord alias Realtime.Database + alias Realtime.RateCounter + alias Realtime.Tenants + + alias RealtimeWeb.TenantBroadcaster def start_link(opts), do: GenServer.start_link(__MODULE__, opts) @impl true def init(args) do + Process.flag(:fullsweep_after, 20) tenant_id = args["id"] Logger.metadata(external_id: tenant_id, project: tenant_id) + %Realtime.Api.Tenant{} = tenant = Tenants.Cache.get_tenant_by_external_id(tenant_id) + rate_counter_args = Tenants.db_events_per_second_rate(tenant) + + RateCounter.new(rate_counter_args) + state = %{ backoff: Backoff.new(backoff_min: 100, backoff_max: 5_000, backoff_type: :rand_exp), db_host: args["db_host"], @@ -41,7 +51,9 @@ defmodule Extensions.PostgresCdcRls.ReplicationPoller do retry_ref: nil, retry_count: 0, slot_name: args["slot_name"] <> slot_name_suffix(), - tenant_id: tenant_id + tenant_id: tenant_id, + rate_counter_args: rate_counter_args, + subscribers_nodes_table: args["subscribers_nodes_table"] } {:ok, _} = Registry.register(__MODULE__.Registry, tenant_id, %{}) @@ -74,7 +86,9 @@ defmodule Extensions.PostgresCdcRls.ReplicationPoller do max_record_bytes: max_record_bytes, max_changes: max_changes, conn: conn, - tenant_id: tenant_id + tenant_id: tenant_id, + subscribers_nodes_table: subscribers_nodes_table, + rate_counter_args: rate_counter_args } = state ) do cancel_timer(poll_ref) @@ -84,7 +98,7 @@ defmodule Extensions.PostgresCdcRls.ReplicationPoller do {time, list_changes} = :timer.tc(Replications, :list_changes, args) 
record_list_changes_telemetry(time, tenant_id) - case handle_list_changes_result(list_changes, tenant_id) do + case handle_list_changes_result(list_changes, subscribers_nodes_table, tenant_id, rate_counter_args) do {:ok, row_count} -> Backoff.reset(backoff) @@ -173,95 +187,149 @@ defmodule Extensions.PostgresCdcRls.ReplicationPoller do defp handle_list_changes_result( {:ok, %Postgrex.Result{ - columns: ["wal", "is_rls_enabled", "subscription_ids", "errors"] = columns, + columns: columns, rows: [_ | _] = rows, num_rows: rows_count }}, - tenant_id + subscribers_nodes_table, + tenant_id, + rate_counter_args ) do - for row <- rows, - change <- columns |> Enum.zip(row) |> generate_record() |> List.wrap() do - topic = "realtime:postgres:" <> tenant_id - - RealtimeWeb.TenantBroadcaster.pubsub_broadcast(tenant_id, topic, change, MessageDispatcher) + case RateCounter.get(rate_counter_args) do + {:ok, %{limit: %{triggered: true}}} -> + :ok + + _ -> + for row <- rows, + change <- columns |> Enum.zip(row) |> generate_record() |> List.wrap() do + topic = "realtime:postgres:" <> tenant_id + + Realtime.GenCounter.add(rate_counter_args.id, MapSet.size(change.subscription_ids)) + + payload = + change + |> Map.drop([:subscription_ids]) + |> Jason.encode!() + + case collect_subscription_nodes(subscribers_nodes_table, change.subscription_ids) do + {:ok, nodes} -> + for {node, subscription_ids} <- nodes do + TenantBroadcaster.pubsub_direct_broadcast( + node, + tenant_id, + topic, + # Send only the subscription IDs relevant to this node + {change.type, payload, MapSet.new(subscription_ids)}, + MessageDispatcher, + :postgres_changes + ) + end + + {:error, :node_not_found} -> + TenantBroadcaster.pubsub_broadcast( + tenant_id, + topic, + {change.type, payload, change.subscription_ids}, + MessageDispatcher, + :postgres_changes + ) + end + end end {:ok, rows_count} end - defp handle_list_changes_result({:ok, _}, _), do: {:ok, 0} - defp handle_list_changes_result({:error, reason}, _), do: 
{:error, reason} + defp handle_list_changes_result({:ok, _}, _, _, _), do: {:ok, 0} + defp handle_list_changes_result({:error, reason}, _, _, _), do: {:error, reason} + + defp collect_subscription_nodes(subscribers_nodes_table, subscription_ids) do + Enum.reduce_while(subscription_ids, {:ok, %{}}, fn subscription_id, {:ok, acc} -> + case :ets.lookup(subscribers_nodes_table, subscription_id) do + [{_, node}] -> + updated_acc = + Map.update(acc, node, [subscription_id], fn existing_ids -> [subscription_id | existing_ids] end) + + {:cont, {:ok, updated_acc}} + + _ -> + {:halt, {:error, :node_not_found}} + end + end) + rescue + _ -> {:error, :node_not_found} + end def generate_record([ - {"wal", - %{ - "type" => "INSERT" = type, - "schema" => schema, - "table" => table - } = wal}, - {"is_rls_enabled", _}, + {"type", "INSERT" = type}, + {"schema", schema}, + {"table", table}, + {"columns", columns}, + {"record", record}, + {"old_record", _}, + {"commit_timestamp", commit_timestamp}, {"subscription_ids", subscription_ids}, {"errors", errors} ]) when is_list(subscription_ids) do %NewRecord{ - columns: Map.get(wal, "columns", []), - commit_timestamp: Map.get(wal, "commit_timestamp"), + columns: Jason.Fragment.new(columns), + commit_timestamp: commit_timestamp, errors: convert_errors(errors), schema: schema, table: table, type: type, subscription_ids: MapSet.new(subscription_ids), - record: Map.get(wal, "record", %{}) + record: Jason.Fragment.new(record) } end def generate_record([ - {"wal", - %{ - "type" => "UPDATE" = type, - "schema" => schema, - "table" => table - } = wal}, - {"is_rls_enabled", _}, + {"type", "UPDATE" = type}, + {"schema", schema}, + {"table", table}, + {"columns", columns}, + {"record", record}, + {"old_record", old_record}, + {"commit_timestamp", commit_timestamp}, {"subscription_ids", subscription_ids}, {"errors", errors} ]) when is_list(subscription_ids) do %UpdatedRecord{ - columns: Map.get(wal, "columns", []), - commit_timestamp: Map.get(wal, 
"commit_timestamp"), + columns: Jason.Fragment.new(columns), + commit_timestamp: commit_timestamp, errors: convert_errors(errors), schema: schema, table: table, type: type, subscription_ids: MapSet.new(subscription_ids), - old_record: Map.get(wal, "old_record", %{}), - record: Map.get(wal, "record", %{}) + old_record: Jason.Fragment.new(old_record), + record: Jason.Fragment.new(record) } end def generate_record([ - {"wal", - %{ - "type" => "DELETE" = type, - "schema" => schema, - "table" => table - } = wal}, - {"is_rls_enabled", _}, + {"type", "DELETE" = type}, + {"schema", schema}, + {"table", table}, + {"columns", columns}, + {"record", _}, + {"old_record", old_record}, + {"commit_timestamp", commit_timestamp}, {"subscription_ids", subscription_ids}, {"errors", errors} ]) when is_list(subscription_ids) do %DeletedRecord{ - columns: Map.get(wal, "columns", []), - commit_timestamp: Map.get(wal, "commit_timestamp"), + columns: Jason.Fragment.new(columns), + commit_timestamp: commit_timestamp, errors: convert_errors(errors), schema: schema, table: table, type: type, subscription_ids: MapSet.new(subscription_ids), - old_record: Map.get(wal, "old_record", %{}) + old_record: Jason.Fragment.new(old_record) } end diff --git a/lib/extensions/postgres_cdc_rls/replications.ex b/lib/extensions/postgres_cdc_rls/replications.ex index 16b4f997d..be1f0acd9 100644 --- a/lib/extensions/postgres_cdc_rls/replications.ex +++ b/lib/extensions/postgres_cdc_rls/replications.ex @@ -72,7 +72,18 @@ defmodule Extensions.PostgresCdcRls.Replications do def list_changes(conn, slot_name, publication, max_changes, max_record_bytes) do query( conn, - "select * from realtime.list_changes($1, $2, $3, $4)", + """ + SELECT wal->>'type' as type, + wal->>'schema' as schema, + wal->>'table' as table, + COALESCE(wal->>'columns', '[]') as columns, + COALESCE(wal->>'record', '{}') as record, + COALESCE(wal->>'old_record', '{}') as old_record, + wal->>'commit_timestamp' as commit_timestamp, + 
subscription_ids, + errors + FROM realtime.list_changes($1, $2, $3, $4) + """, [ publication, slot_name, diff --git a/lib/extensions/postgres_cdc_rls/subscription_manager.ex b/lib/extensions/postgres_cdc_rls/subscription_manager.ex index 2dba9912e..175376e12 100644 --- a/lib/extensions/postgres_cdc_rls/subscription_manager.ex +++ b/lib/extensions/postgres_cdc_rls/subscription_manager.ex @@ -24,7 +24,8 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do defstruct [ :id, :publication, - :subscribers_tid, + :subscribers_pids_table, + :subscribers_nodes_table, :conn, :delete_queue, :no_users_ref, @@ -37,7 +38,8 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do @type t :: %__MODULE__{ id: String.t(), publication: String.t(), - subscribers_tid: :ets.tid(), + subscribers_pids_table: :ets.tid(), + subscribers_nodes_table: :ets.tid(), conn: Postgrex.conn(), oids: map(), check_oid_ref: reference() | nil, @@ -67,7 +69,12 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do @impl true def handle_continue({:connect, args}, _) do - %{"id" => id, "publication" => publication, "subscribers_tid" => subscribers_tid} = args + %{ + "id" => id, + "publication" => publication, + "subscribers_pids_table" => subscribers_pids_table, + "subscribers_nodes_table" => subscribers_nodes_table + } = args subscription_manager_settings = Database.from_settings(args, "realtime_subscription_manager") @@ -85,19 +92,21 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do check_region_interval = Map.get(args, :check_region_interval, rebalance_check_interval_in_ms()) send_region_check_message(check_region_interval) - state = %State{ - id: id, - conn: conn, - publication: publication, - subscribers_tid: subscribers_tid, - oids: oids, - delete_queue: %{ - ref: check_delete_queue(), - queue: :queue.new() - }, - no_users_ref: check_no_users(), - check_region_interval: check_region_interval - } + state = + %State{ + id: id, + conn: conn, + publication: publication, + 
subscribers_pids_table: subscribers_pids_table, + subscribers_nodes_table: subscribers_nodes_table, + oids: oids, + delete_queue: %{ + ref: check_delete_queue(), + queue: :queue.new() + }, + no_users_ref: check_no_users(), + check_region_interval: check_region_interval + } send(self(), :check_oids) {:noreply, state} @@ -105,11 +114,13 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do @impl true def handle_info({:subscribed, {pid, id}}, state) do - case :ets.match(state.subscribers_tid, {pid, id, :"$1", :_}) do - [] -> :ets.insert(state.subscribers_tid, {pid, id, Process.monitor(pid), node(pid)}) + case :ets.match(state.subscribers_pids_table, {pid, id, :"$1", :_}) do + [] -> :ets.insert(state.subscribers_pids_table, {pid, id, Process.monitor(pid), node(pid)}) _ -> :ok end + :ets.insert(state.subscribers_nodes_table, {UUID.string_to_binary!(id), node(pid)}) + {:noreply, %{state | no_users_ts: nil}} end @@ -132,7 +143,7 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do Process.demonitor(ref, [:flush]) send(pid, :postgres_subscribe) end - |> :ets.foldl([], state.subscribers_tid) + |> :ets.foldl([], state.subscribers_pids_table) new_oids end @@ -142,19 +153,25 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do def handle_info( {:DOWN, _ref, :process, pid, _reason}, - %State{subscribers_tid: tid, delete_queue: %{queue: q}} = state + %State{ + subscribers_pids_table: subscribers_pids_table, + subscribers_nodes_table: subscribers_nodes_table, + delete_queue: %{queue: q} + } = state ) do q1 = - case :ets.take(tid, pid) do + case :ets.take(subscribers_pids_table, pid) do [] -> q values -> for {_pid, id, _ref, _node} <- values, reduce: q do acc -> - id - |> UUID.string_to_binary!() - |> :queue.in(acc) + bin_id = UUID.string_to_binary!(id) + + :ets.delete(subscribers_nodes_table, bin_id) + + :queue.in(bin_id, acc) end end @@ -187,7 +204,7 @@ defmodule Extensions.PostgresCdcRls.SubscriptionManager do {:noreply, %{state | delete_queue: %{ref: 
ref, queue: q1}}} end - def handle_info(:check_no_users, %{subscribers_tid: tid, no_users_ts: ts} = state) do + def handle_info(:check_no_users, %{subscribers_pids_table: tid, no_users_ts: ts} = state) do Helpers.cancel_timer(state.no_users_ref) ts_new = diff --git a/lib/extensions/postgres_cdc_rls/subscriptions.ex b/lib/extensions/postgres_cdc_rls/subscriptions.ex index c8c0eda5f..45da8b052 100644 --- a/lib/extensions/postgres_cdc_rls/subscriptions.ex +++ b/lib/extensions/postgres_cdc_rls/subscriptions.ex @@ -7,13 +7,17 @@ defmodule Extensions.PostgresCdcRls.Subscriptions do import Postgrex, only: [transaction: 2, query: 3, rollback: 2] @type conn() :: Postgrex.conn() + @type filter :: {binary, binary, binary} + @type subscription_params :: {binary, binary, [filter]} + @type subscription_list :: [%{id: binary, claims: map, subscription_params: subscription_params}] @filter_types ["eq", "neq", "lt", "lte", "gt", "gte", "in"] - @spec create(conn(), String.t(), [map()], pid(), pid()) :: + @spec create(conn(), String.t(), subscription_list, pid(), pid()) :: {:ok, Postgrex.Result.t()} - | {:error, Exception.t() | :malformed_subscription_params | {:subscription_insert_failed, map()}} - def create(conn, publication, params_list, manager, caller) do + | {:error, Exception.t() | {:exit, term} | {:subscription_insert_failed, String.t()}} + + def create(conn, publication, subscription_list, manager, caller) do sql = "with sub_tables as ( select rr.entity @@ -50,37 +54,34 @@ defmodule Extensions.PostgresCdcRls.Subscriptions do id" transaction(conn, fn conn -> - Enum.map(params_list, fn %{id: id, claims: claims, params: params} -> - case parse_subscription_params(params) do - {:ok, [schema, table, filters]} -> - case query(conn, sql, [publication, schema, table, id, claims, filters]) do - {:ok, %{num_rows: num} = result} when num > 0 -> - send(manager, {:subscribed, {caller, id}}) - result - - {:ok, _} -> - msg = - "Unable to subscribe to changes with given parameters. 
Please check Realtime is enabled for the given connect parameters: [#{params_to_log(params)}]" + Enum.map(subscription_list, fn %{id: id, claims: claims, subscription_params: params = {schema, table, filters}} -> + case query(conn, sql, [publication, schema, table, id, claims, filters]) do + {:ok, %{num_rows: num} = result} when num > 0 -> + send(manager, {:subscribed, {caller, id}}) + result - rollback(conn, msg) + {:ok, _} -> + msg = + "Unable to subscribe to changes with given parameters. Please check Realtime is enabled for the given connect parameters: [#{params_to_log(params)}]" - {:error, exception} -> - msg = - "Unable to subscribe to changes with given parameters. An exception happened so please check your connect parameters: [#{params_to_log(params)}]. Exception: #{Exception.message(exception)}" + rollback(conn, {:subscription_insert_failed, msg}) - rollback(conn, msg) - end + {:error, exception} -> + msg = + "Unable to subscribe to changes with given parameters. An exception happened so please check your connect parameters: [#{params_to_log(params)}]. 
Exception: #{Exception.message(exception)}" - {:error, reason} -> - rollback(conn, reason) + rollback(conn, {:subscription_insert_failed, msg}) end end) end) + rescue + e in DBConnection.ConnectionError -> {:error, e} + catch + :exit, reason -> {:error, {:exit, reason}} end - defp params_to_log(map) do - map - |> Map.to_list() + defp params_to_log({schema, table, filters}) do + [schema: schema, table: table, filters: filters] |> Enum.map_join(", ", fn {k, v} -> "#{k}: #{to_log(v)}" end) end @@ -166,31 +167,47 @@ defmodule Extensions.PostgresCdcRls.Subscriptions do ## Examples - iex> params = %{"schema" => "public", "table" => "messages", "filter" => "subject=eq.hey"} - iex> Extensions.PostgresCdcRls.Subscriptions.parse_subscription_params(params) - {:ok, ["public", "messages", [{"subject", "eq", "hey"}]]} + iex> parse_subscription_params(%{"schema" => "public", "table" => "messages", "filter" => "subject=eq.hey"}) + {:ok, {"public", "messages", [{"subject", "eq", "hey"}]}} `in` filter: - iex> params = %{"schema" => "public", "table" => "messages", "filter" => "subject=in.(hidee,ho)"} - iex> Extensions.PostgresCdcRls.Subscriptions.parse_subscription_params(params) - {:ok, ["public", "messages", [{"subject", "in", "{hidee,ho}"}]]} + iex> parse_subscription_params(%{"schema" => "public", "table" => "messages", "filter" => "subject=in.(hidee,ho)"}) + {:ok, {"public", "messages", [{"subject", "in", "{hidee,ho}"}]}} + + no filter: + + iex> parse_subscription_params(%{"schema" => "public", "table" => "messages"}) + {:ok, {"public", "messages", []}} + + only schema: + + iex> parse_subscription_params(%{"schema" => "public"}) + {:ok, {"public", "*", []}} + + only table: + + iex> parse_subscription_params(%{"table" => "messages"}) + {:ok, {"public", "messages", []}} An unsupported filter will respond with an error tuple: - iex> params = %{"schema" => "public", "table" => "messages", "filter" => "subject=like.hey"} - iex> 
Extensions.PostgresCdcRls.Subscriptions.parse_subscription_params(params) + iex> parse_subscription_params(%{"schema" => "public", "table" => "messages", "filter" => "subject=like.hey"}) {:error, ~s(Error parsing `filter` params: ["like", "hey"])} Catch `undefined` filters: - iex> params = %{"schema" => "public", "table" => "messages", "filter" => "undefined"} - iex> Extensions.PostgresCdcRls.Subscriptions.parse_subscription_params(params) + iex> parse_subscription_params(%{"schema" => "public", "table" => "messages", "filter" => "undefined"}) {:error, ~s(Error parsing `filter` params: ["undefined"])} + Catch `missing params`: + + iex> parse_subscription_params(%{}) + {:error, ~s(No subscription params provided. Please provide at least a `schema` or `table` to subscribe to: %{})} + """ - @spec parse_subscription_params(map()) :: {:ok, list} | {:error, binary()} + @spec parse_subscription_params(map()) :: {:ok, subscription_params} | {:error, binary()} def parse_subscription_params(params) do case params do %{"schema" => schema, "table" => table, "filter" => filter} -> @@ -198,7 +215,7 @@ defmodule Extensions.PostgresCdcRls.Subscriptions do [filter_type, value] when filter_type in @filter_types <- String.split(rest, ".", parts: 2), {:ok, formatted_value} <- format_filter_value(filter_type, value) do - {:ok, [schema, table, [{col, filter_type, formatted_value}]]} + {:ok, {schema, table, [{col, filter_type, formatted_value}]}} else {:error, msg} -> {:error, "Error parsing `filter` params: #{msg}"} @@ -208,13 +225,13 @@ defmodule Extensions.PostgresCdcRls.Subscriptions do end %{"schema" => schema, "table" => table} -> - {:ok, [schema, table, []]} + {:ok, {schema, table, []}} %{"schema" => schema} -> - {:ok, [schema, "*", []]} + {:ok, {schema, "*", []}} %{"table" => table} -> - {:ok, ["public", table, []]} + {:ok, {"public", table, []}} map when is_map_key(map, "user_token") or is_map_key(map, "auth_token") -> {:error, diff --git 
a/lib/extensions/postgres_cdc_rls/subscriptions_checker.ex b/lib/extensions/postgres_cdc_rls/subscriptions_checker.ex index ed2b42eb5..cca3dc02a 100644 --- a/lib/extensions/postgres_cdc_rls/subscriptions_checker.ex +++ b/lib/extensions/postgres_cdc_rls/subscriptions_checker.ex @@ -7,7 +7,7 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do alias Realtime.Database alias Realtime.Helpers - alias Realtime.Rpc + alias Realtime.GenRpc alias Realtime.Telemetry alias Rls.Subscriptions @@ -17,13 +17,14 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do defmodule State do @moduledoc false - defstruct [:id, :conn, :check_active_pids, :subscribers_tid, :delete_queue] + defstruct [:id, :conn, :check_active_pids, :subscribers_pids_table, :subscribers_nodes_table, :delete_queue] @type t :: %__MODULE__{ id: String.t(), conn: Postgrex.conn(), check_active_pids: reference(), - subscribers_tid: :ets.tid(), + subscribers_pids_table: :ets.tid(), + subscribers_nodes_table: :ets.tid(), delete_queue: %{ ref: reference(), queue: :queue.queue() @@ -47,7 +48,11 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do @impl true def handle_continue({:connect, args}, _) do - %{"id" => id, "subscribers_tid" => subscribers_tid} = args + %{ + "id" => id, + "subscribers_pids_table" => subscribers_pids_table, + "subscribers_nodes_table" => subscribers_nodes_table + } = args realtime_subscription_checker_settings = Database.from_settings(args, "realtime_subscription_checker") @@ -58,7 +63,8 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do id: id, conn: conn, check_active_pids: check_active_pids(), - subscribers_tid: subscribers_tid, + subscribers_pids_table: subscribers_pids_table, + subscribers_nodes_table: subscribers_nodes_table, delete_queue: %{ ref: nil, queue: :queue.new() @@ -69,18 +75,14 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do end @impl true - def handle_info( - :check_active_pids, - %State{check_active_pids: ref, 
subscribers_tid: tid, delete_queue: delete_queue, id: id} = - state - ) do + def handle_info(:check_active_pids, %State{check_active_pids: ref, delete_queue: delete_queue, id: id} = state) do Helpers.cancel_timer(ref) ids = - tid + state.subscribers_pids_table |> subscribers_by_node() |> not_alive_pids_dist() - |> pop_not_alive_pids(tid, id) + |> pop_not_alive_pids(state.subscribers_pids_table, state.subscribers_nodes_table, id) new_delete_queue = if length(ids) > 0 do @@ -128,10 +130,10 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do ## Internal functions - @spec pop_not_alive_pids([pid()], :ets.tid(), binary()) :: [Ecto.UUID.t()] - def pop_not_alive_pids(pids, tid, tenant_id) do + @spec pop_not_alive_pids([pid()], :ets.tid(), :ets.tid(), binary()) :: [Ecto.UUID.t()] + def pop_not_alive_pids(pids, subscribers_pids_table, subscribers_nodes_table, tenant_id) do Enum.reduce(pids, [], fn pid, acc -> - case :ets.lookup(tid, pid) do + case :ets.lookup(subscribers_pids_table, pid) do [] -> Telemetry.execute( [:realtime, :subscriptions_checker, :pid_not_found], @@ -149,8 +151,11 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do %{tenant_id: tenant_id} ) - :ets.delete(tid, pid) - UUID.string_to_binary!(postgres_id) + :ets.delete(subscribers_pids_table, pid) + bin_id = UUID.string_to_binary!(postgres_id) + + :ets.delete(subscribers_nodes_table, bin_id) + bin_id end ++ acc end end) @@ -172,8 +177,8 @@ defmodule Extensions.PostgresCdcRls.SubscriptionsChecker do if node == node() do acc ++ not_alive_pids(pids) else - case Rpc.call(node, __MODULE__, :not_alive_pids, [pids], timeout: 15_000) do - {:badrpc, _} = error -> + case GenRpc.call(node, __MODULE__, :not_alive_pids, [pids], timeout: 15_000) do + {:error, :rpc_error, _} = error -> log_error("UnableToCheckProcessesOnRemoteNode", error) acc diff --git a/lib/extensions/postgres_cdc_rls/supervisor.ex b/lib/extensions/postgres_cdc_rls/supervisor.ex index 21e124190..fc3701aeb 100644 --- 
a/lib/extensions/postgres_cdc_rls/supervisor.ex +++ b/lib/extensions/postgres_cdc_rls/supervisor.ex @@ -15,7 +15,7 @@ defmodule Extensions.PostgresCdcRls.Supervisor do def init(_args) do load_migrations_modules() - :syn.add_node_to_scopes([PostgresCdcRls]) + :syn.add_node_to_scopes(Realtime.Syn.PostgresCdc.scopes()) children = [ { diff --git a/lib/extensions/postgres_cdc_rls/worker_supervisor.ex b/lib/extensions/postgres_cdc_rls/worker_supervisor.ex index 37f88014e..548a71688 100644 --- a/lib/extensions/postgres_cdc_rls/worker_supervisor.ex +++ b/lib/extensions/postgres_cdc_rls/worker_supervisor.ex @@ -6,7 +6,7 @@ defmodule Extensions.PostgresCdcRls.WorkerSupervisor do alias PostgresCdcRls.ReplicationPoller alias PostgresCdcRls.SubscriptionManager alias PostgresCdcRls.SubscriptionsChecker - alias Realtime.Api + alias Realtime.Tenants.Cache alias Realtime.PostgresCdc.Exception def start_link(args) do @@ -17,14 +17,21 @@ defmodule Extensions.PostgresCdcRls.WorkerSupervisor do @impl true def init(%{"id" => tenant} = args) when is_binary(tenant) do Logger.metadata(external_id: tenant, project: tenant) - unless Api.get_tenant_by_external_id(tenant, :primary), do: raise(Exception) + unless Cache.get_tenant_by_external_id(tenant), do: raise(Exception) - tid_args = Map.merge(args, %{"subscribers_tid" => :ets.new(__MODULE__, [:public, :bag])}) + subscribers_pids_table = :ets.new(__MODULE__, [:public, :bag]) + subscribers_nodes_table = :ets.new(__MODULE__, [:public, :set]) + + tid_args = + Map.merge(args, %{ + "subscribers_pids_table" => subscribers_pids_table, + "subscribers_nodes_table" => subscribers_nodes_table + }) children = [ %{ id: ReplicationPoller, - start: {ReplicationPoller, :start_link, [args]}, + start: {ReplicationPoller, :start_link, [tid_args]}, restart: :transient }, %{ diff --git a/lib/realtime/adapters/postgres/decoder.ex b/lib/realtime/adapters/postgres/decoder.ex index e5ea161e3..82effbd94 100644 --- a/lib/realtime/adapters/postgres/decoder.ex +++ 
b/lib/realtime/adapters/postgres/decoder.ex @@ -136,37 +136,25 @@ defmodule Realtime.Adapters.Postgres.Decoder do @pg_epoch DateTime.from_iso8601("2000-01-01T00:00:00Z") - alias Messages.{ - Begin, - Commit, - Origin, - Relation, - Relation.Column, - Insert, - Update, - Delete, - Truncate, - Type, - Unsupported - } + alias Messages.Begin + alias Messages.Commit + alias Messages.Origin + alias Messages.Relation + alias Messages.Relation.Column + alias Messages.Insert + alias Messages.Type + alias Messages.Unsupported alias Realtime.Adapters.Postgres.OidDatabase @doc """ Parses logical replication messages from Postgres - - ## Examples - - iex> decode_message(<<73, 0, 0, 96, 0, 78, 0, 2, 116, 0, 0, 0, 3, 98, 97, 122, 116, 0, 0, 0, 3, 53, 54, 48>>) - %Realtime.Adapters.Postgres.Decoder.Messages.Insert{relation_id: 24576, tuple_data: {"baz", "560"}} - """ - def decode_message(message) when is_binary(message) do - # Logger.debug("Message before conversion " <> message) - decode_message_impl(message) + def decode_message(message, relations) when is_binary(message) do + decode_message_impl(message, relations) end - defp decode_message_impl(<<"B", lsn::binary-8, timestamp::integer-64, xid::integer-32>>) do + defp decode_message_impl(<<"B", lsn::binary-8, timestamp::integer-64, xid::integer-32>>, _relations) do %Begin{ final_lsn: decode_lsn(lsn), commit_timestamp: pgtimestamp_to_timestamp(timestamp), @@ -174,7 +162,10 @@ defmodule Realtime.Adapters.Postgres.Decoder do } end - defp decode_message_impl(<<"C", _flags::binary-1, lsn::binary-8, end_lsn::binary-8, timestamp::integer-64>>) do + defp decode_message_impl( + <<"C", _flags::binary-1, lsn::binary-8, end_lsn::binary-8, timestamp::integer-64>>, + _relations + ) do %Commit{ flags: [], lsn: decode_lsn(lsn), @@ -184,14 +175,14 @@ defmodule Realtime.Adapters.Postgres.Decoder do end # TODO: Verify this is correct with real data from Postgres - defp decode_message_impl(<<"O", lsn::binary-8, name::binary>>) do + defp 
decode_message_impl(<<"O", lsn::binary-8, name::binary>>, _relations) do %Origin{ origin_commit_lsn: decode_lsn(lsn), name: name } end - defp decode_message_impl(<<"R", id::integer-32, rest::binary>>) do + defp decode_message_impl(<<"R", id::integer-32, rest::binary>>, _relations) do [ namespace | [name | [<>]] @@ -215,81 +206,22 @@ defmodule Realtime.Adapters.Postgres.Decoder do } end - defp decode_message_impl(<<"I", relation_id::integer-32, "N", number_of_columns::integer-16, tuple_data::binary>>) do - {<<>>, decoded_tuple_data} = decode_tuple_data(tuple_data, number_of_columns) - - %Insert{ - relation_id: relation_id, - tuple_data: decoded_tuple_data - } - end - - defp decode_message_impl(<<"U", relation_id::integer-32, "N", number_of_columns::integer-16, tuple_data::binary>>) do - {<<>>, decoded_tuple_data} = decode_tuple_data(tuple_data, number_of_columns) - - %Update{ - relation_id: relation_id, - tuple_data: decoded_tuple_data - } - end - defp decode_message_impl( - <<"U", relation_id::integer-32, key_or_old::binary-1, number_of_columns::integer-16, tuple_data::binary>> - ) - when key_or_old == "O" or key_or_old == "K" do - {<<"N", new_number_of_columns::integer-16, new_tuple_binary::binary>>, old_decoded_tuple_data} = - decode_tuple_data(tuple_data, number_of_columns) + <<"I", relation_id::integer-32, "N", number_of_columns::integer-16, tuple_data::binary>>, + relations + ) do + relation = relations |> get_in([relation_id, :columns]) - {<<>>, decoded_tuple_data} = decode_tuple_data(new_tuple_binary, new_number_of_columns) - - base_update_msg = %Update{ - relation_id: relation_id, - tuple_data: decoded_tuple_data - } + if relation do + {<<>>, decoded_tuple_data} = decode_tuple_data(tuple_data, number_of_columns, relation) - case key_or_old do - "K" -> Map.put(base_update_msg, :changed_key_tuple_data, old_decoded_tuple_data) - "O" -> Map.put(base_update_msg, :old_tuple_data, old_decoded_tuple_data) + %Insert{relation_id: relation_id, tuple_data: 
decoded_tuple_data} + else + %Unsupported{} end end - defp decode_message_impl( - <<"D", relation_id::integer-32, key_or_old::binary-1, number_of_columns::integer-16, tuple_data::binary>> - ) - when key_or_old == "K" or key_or_old == "O" do - {<<>>, decoded_tuple_data} = decode_tuple_data(tuple_data, number_of_columns) - - base_delete_msg = %Delete{ - relation_id: relation_id - } - - case key_or_old do - "K" -> Map.put(base_delete_msg, :changed_key_tuple_data, decoded_tuple_data) - "O" -> Map.put(base_delete_msg, :old_tuple_data, decoded_tuple_data) - end - end - - defp decode_message_impl(<<"T", number_of_relations::integer-32, options::integer-8, column_ids::binary>>) do - truncated_relations = - for relation_id_bin <- column_ids |> :binary.bin_to_list() |> Enum.chunk_every(4), - do: relation_id_bin |> :binary.list_to_bin() |> :binary.decode_unsigned() - - decoded_options = - case options do - 0 -> [] - 1 -> [:cascade] - 2 -> [:restart_identity] - 3 -> [:cascade, :restart_identity] - end - - %Truncate{ - number_of_relations: number_of_relations, - options: decoded_options, - truncated_relations: truncated_relations - } - end - - defp decode_message_impl(<<"Y", data_type_id::integer-32, namespace_and_name::binary>>) do + defp decode_message_impl(<<"Y", data_type_id::integer-32, namespace_and_name::binary>>, _relations) do [namespace, name_with_null] = :binary.split(namespace_and_name, <<0>>) name = String.slice(name_with_null, 0..-2//1) @@ -300,32 +232,54 @@ defmodule Realtime.Adapters.Postgres.Decoder do } end - defp decode_message_impl(binary), do: %Unsupported{data: binary} + defp decode_message_impl(binary, _relations), do: %Unsupported{data: binary} - defp decode_tuple_data(binary, columns_remaining, accumulator \\ []) + defp decode_tuple_data(binary, columns_remaining, relations, accumulator \\ []) - defp decode_tuple_data(remaining_binary, 0, accumulator) when is_binary(remaining_binary), + defp decode_tuple_data(remaining_binary, 0, _relations, 
accumulator) when is_binary(remaining_binary), do: {remaining_binary, accumulator |> Enum.reverse() |> List.to_tuple()} - defp decode_tuple_data(<<"n", rest::binary>>, columns_remaining, accumulator), - do: decode_tuple_data(rest, columns_remaining - 1, [nil | accumulator]) + defp decode_tuple_data(<<"n", rest::binary>>, columns_remaining, [_ | relations], accumulator), + do: decode_tuple_data(rest, columns_remaining - 1, relations, [nil | accumulator]) - defp decode_tuple_data(<<"u", rest::binary>>, columns_remaining, accumulator), - do: decode_tuple_data(rest, columns_remaining - 1, [:unchanged_toast | accumulator]) + defp decode_tuple_data(<<"u", rest::binary>>, columns_remaining, [_ | relations], accumulator), + do: decode_tuple_data(rest, columns_remaining - 1, relations, [:unchanged_toast | accumulator]) + @start_date "2000-01-01T00:00:00Z" defp decode_tuple_data( - <<"t", column_length::integer-32, rest::binary>>, + <<"b", column_length::integer-32, rest::binary>>, columns_remaining, + [%Column{type: type} | relations], accumulator - ), - do: - decode_tuple_data( - :erlang.binary_part(rest, {byte_size(rest), -(byte_size(rest) - column_length)}), - columns_remaining - 1, - [ - :erlang.binary_part(rest, {0, column_length}) | accumulator - ] - ) + ) do + data = :erlang.binary_part(rest, {0, column_length}) + remainder = :erlang.binary_part(rest, {byte_size(rest), -(byte_size(rest) - column_length)}) + + data = + case type do + "bool" -> + data == <<1>> + + "jsonb" -> + <<1, rest::binary>> = data + rest + + "timestamp" -> + <<microseconds::integer-64>> = data + + @start_date + |> NaiveDateTime.from_iso8601!() + |> NaiveDateTime.add(microseconds, :microsecond) + + "text" -> + data + + "uuid" -> + UUID.binary_to_string!(data) + end + + decode_tuple_data(remainder, columns_remaining - 1, relations, [data | accumulator]) + end defp decode_columns(binary, accumulator \\ []) defp decode_columns(<<>>, accumulator), do: Enum.reverse(accumulator) @@ -345,7 +299,6 @@ defmodule
Realtime.Adapters.Postgres.Decoder do name: name, flags: decoded_flags, type: OidDatabase.name_for_type_id(data_type_id), - # type: data_type_id, type_modifier: type_modifier } | accumulator diff --git a/lib/realtime/api.ex b/lib/realtime/api.ex index 23e28feab..22c64f34d 100644 --- a/lib/realtime/api.ex +++ b/lib/realtime/api.ex @@ -6,13 +6,17 @@ defmodule Realtime.Api do import Ecto.Query + alias Ecto.Changeset alias Realtime.Api.Extensions alias Realtime.Api.Tenant alias Realtime.GenCounter + alias Realtime.GenRpc + alias Realtime.Nodes alias Realtime.RateCounter alias Realtime.Repo alias Realtime.Repo.Replica alias Realtime.Tenants + alias Realtime.Tenants.Cache alias Realtime.Tenants.Connect alias RealtimeWeb.SocketDisconnect @@ -109,31 +113,46 @@ defmodule Realtime.Api do """ def create_tenant(attrs) do Logger.debug("create_tenant #{inspect(attrs, pretty: true)}") + tenant_id = Map.get(attrs, :external_id) || Map.get(attrs, "external_id") - %Tenant{} - |> Tenant.changeset(attrs) - |> Repo.insert() + if master_region?() do + %Tenant{} + |> Tenant.changeset(attrs) + |> Repo.insert() + |> case do + {:ok, tenant} -> + Cache.global_cache_update(tenant) + {:ok, tenant} + + error -> + error + end + else + call(:create_tenant, [attrs], tenant_id) + end end @doc """ Updates a tenant. 
- - ## Examples - - iex> update_tenant(tenant, %{field: new_value}) - {:ok, %Tenant{}} - - iex> update_tenant(tenant, %{field: bad_value}) - {:error, %Ecto.Changeset{}} - """ - def update_tenant(%Tenant{} = tenant, attrs) do + @spec update_tenant_by_external_id(binary(), map()) :: {:ok, Tenant.t()} | {:error, term()} + def update_tenant_by_external_id(tenant_id, attrs) when is_binary(tenant_id) do + if master_region?() do + tenant_id + |> get_tenant_by_external_id(use_replica?: false) + |> update_tenant(attrs) + else + call(:update_tenant_by_external_id, [tenant_id, attrs], tenant_id) + end + end + + defp update_tenant(%Tenant{} = tenant, attrs) do changeset = Tenant.changeset(tenant, attrs) updated = Repo.update(changeset) case updated do {:ok, tenant} -> - maybe_invalidate_cache(changeset) + maybe_update_cache(tenant, changeset) maybe_trigger_disconnect(changeset) maybe_restart_db_connection(changeset) Logger.debug("Tenant updated: #{inspect(tenant, pretty: true)}") @@ -145,63 +164,74 @@ defmodule Realtime.Api do updated end - @doc """ - Deletes a tenant. 
- - ## Examples - - iex> delete_tenant(tenant) - {:ok, %Tenant{}} - - iex> delete_tenant(tenant) - {:error, %Ecto.Changeset{}} - - """ - def delete_tenant(%Tenant{} = tenant), do: Repo.delete(tenant) - @spec delete_tenant_by_external_id(String.t()) :: boolean() def delete_tenant_by_external_id(id) do - from(t in Tenant, where: t.external_id == ^id) - |> Repo.delete_all() - |> case do - {num, _} when num > 0 -> - true - - _ -> - false + if master_region?() do + query = from(t in Tenant, where: t.external_id == ^id) + {num, _} = Repo.delete_all(query) + num > 0 + else + call(:delete_tenant_by_external_id, [id], id) end end - @spec get_tenant_by_external_id(String.t(), atom()) :: Tenant.t() | nil - def get_tenant_by_external_id(external_id, repo \\ :replica) - when repo in [:primary, :replica] do - repo = - case repo do - :primary -> Repo - :replica -> Replica.replica() - end + @spec get_tenant_by_external_id(String.t(), Keyword.t()) :: Tenant.t() | nil + def get_tenant_by_external_id(external_id, opts \\ []) do + use_replica? = Keyword.get(opts, :use_replica?, true) - Tenant - |> repo.get_by(external_id: external_id) - |> repo.preload(:extensions) + cond do + use_replica? -> + Replica.replica().get_by(Tenant, external_id: external_id) |> Replica.replica().preload(:extensions) + + !use_replica? 
and master_region?() -> + Repo.get_by(Tenant, external_id: external_id) |> Repo.preload(:extensions) + + true -> + call(:get_tenant_by_external_id, [external_id, opts], external_id) + end end - def list_extensions(type \\ "postgres_cdc_rls") do - from(e in Extensions, - where: e.type == ^type, - select: e - ) - |> Replica.replica().all() + defp list_extensions(type) do + query = from(e in Extensions, where: e.type == ^type, select: e) + replica = Replica.replica() + replica.all(query) end def rename_settings_field(from, to) do - for extension <- list_extensions("postgres_cdc_rls") do - {value, settings} = Map.pop(extension.settings, from) - new_settings = Map.put(settings, to, value) + if master_region?() do + for extension <- list_extensions("postgres_cdc_rls") do + {value, settings} = Map.pop(extension.settings, from) + new_settings = Map.put(settings, to, value) + + extension + |> Changeset.cast(%{settings: new_settings}, [:settings]) + |> Repo.update() + end + else + call(:rename_settings_field, [from, to], from) + end + end - extension - |> Ecto.Changeset.cast(%{settings: new_settings}, [:settings]) - |> Repo.update!() + @spec preload_counters(nil | Realtime.Api.Tenant.t(), any()) :: nil | Realtime.Api.Tenant.t() + @doc """ + Updates the migrations_ran field for a tenant. 
+ """ + @spec update_migrations_ran(binary(), integer()) :: {:ok, Tenant.t()} | {:error, term()} + def update_migrations_ran(external_id, count) do + if master_region?() do + tenant = get_tenant_by_external_id(external_id, use_replica?: false) + + tenant + |> Tenant.changeset(%{migrations_ran: count}) + |> Repo.update() + |> tap(fn result -> + case result do + {:ok, tenant} -> Cache.global_cache_update(tenant) + _ -> :ok + end + end) + else + call(:update_migrations_ran, [external_id, count], external_id) end end @@ -224,26 +254,46 @@ defmodule Realtime.Api do |> Map.put(:events_per_second_now, current) end - defp maybe_invalidate_cache( - %Ecto.Changeset{changes: changes, valid?: true, data: %{external_id: external_id}} = changeset - ) - when changes != %{} and requires_restarting_db_connection(changeset) do - Tenants.Cache.distributed_invalidate_tenant_cache(external_id) + defp maybe_update_cache(tenant, %Changeset{changes: changes, valid?: true}) when changes != %{} do + Tenants.Cache.global_cache_update(tenant) end - defp maybe_invalidate_cache(_changeset), do: nil + defp maybe_update_cache(_tenant, _changeset), do: :ok - defp maybe_trigger_disconnect(%Ecto.Changeset{data: %{external_id: external_id}} = changeset) + defp maybe_trigger_disconnect(%Changeset{data: %{external_id: external_id}} = changeset) when requires_disconnect(changeset) do SocketDisconnect.distributed_disconnect(external_id) end defp maybe_trigger_disconnect(_changeset), do: nil - defp maybe_restart_db_connection(%Ecto.Changeset{data: %{external_id: external_id}} = changeset) + defp maybe_restart_db_connection(%Changeset{data: %{external_id: external_id}} = changeset) when requires_restarting_db_connection(changeset) do Connect.shutdown(external_id) end defp maybe_restart_db_connection(_changeset), do: nil + + defp master_region? 
do + region = Application.get_env(:realtime, :region) + master_region = Application.get_env(:realtime, :master_region) || region + region == master_region + end + + defp call(operation, args, tenant_id) do + master_region = Application.get_env(:realtime, :master_region) + + with {:ok, master_node} <- Nodes.node_from_region(master_region, self()), + {:ok, result} <- wrapped_call(master_node, operation, args, tenant_id) do + result + end + end + + defp wrapped_call(master_node, operation, args, tenant_id) do + case GenRpc.call(master_node, __MODULE__, operation, args, tenant_id: tenant_id) do + {:error, :rpc_error, reason} -> {:error, reason} + {:error, reason} -> {:error, reason} + result -> {:ok, result} + end + end end diff --git a/lib/realtime/api/message.ex b/lib/realtime/api/message.ex index 90ebc5bc9..1c7bb5b63 100644 --- a/lib/realtime/api/message.ex +++ b/lib/realtime/api/message.ex @@ -8,6 +8,8 @@ defmodule Realtime.Api.Message do @primary_key {:id, Ecto.UUID, autogenerate: true} @schema_prefix "realtime" + @type t :: %__MODULE__{} + @timestamps_opts [type: :naive_datetime_usec] schema "messages" do field(:topic, :string) field(:extension, Ecto.Enum, values: [:broadcast, :presence]) @@ -35,11 +37,11 @@ defmodule Realtime.Api.Message do end defp put_timestamp(changeset, field) do - changeset |> put_change(field, NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second)) + put_change(changeset, field, NaiveDateTime.utc_now(:microsecond)) end defp maybe_put_timestamp(changeset, field) do - case Map.get(changeset.data, field) do + case get_field(changeset, field) do nil -> put_timestamp(changeset, field) _ -> changeset end diff --git a/lib/realtime/api/tenant.ex b/lib/realtime/api/tenant.ex index cf609cafc..17f6fedda 100644 --- a/lib/realtime/api/tenant.ex +++ b/lib/realtime/api/tenant.ex @@ -19,7 +19,7 @@ defmodule Realtime.Api.Tenant do field(:postgres_cdc_default, :string) field(:max_concurrent_users, :integer) field(:max_events_per_second, :integer) - 
field(:max_presence_events_per_second, :integer, default: 10_000) + field(:max_presence_events_per_second, :integer, default: 1000) field(:max_payload_size_in_kb, :integer, default: 3000) field(:max_bytes_per_second, :integer) field(:max_channels_per_client, :integer) @@ -78,10 +78,11 @@ defmodule Realtime.Api.Tenant do :migrations_ran, :broadcast_adapter ]) - |> validate_required([ - :external_id, - :jwt_secret - ]) + |> validate_required([:external_id]) + |> check_constraint(:jwt_secret, + name: :jwt_secret_or_jwt_jwks_required, + message: "either jwt_secret or jwt_jwks must be provided" + ) |> unique_constraint([:external_id]) |> encrypt_jwt_secret() |> maybe_set_default(:max_bytes_per_second, :tenant_max_bytes_per_second) @@ -102,7 +103,8 @@ defmodule Realtime.Api.Tenant do end end - def encrypt_jwt_secret(changeset) do - update_change(changeset, :jwt_secret, &Crypto.encrypt!/1) - end + def encrypt_jwt_secret(%Ecto.Changeset{valid?: true} = changeset), + do: update_change(changeset, :jwt_secret, &Crypto.encrypt!/1) + + def encrypt_jwt_secret(changeset), do: changeset end diff --git a/lib/realtime/application.ex b/lib/realtime/application.ex index 0f4c9ae50..00058acc9 100644 --- a/lib/realtime/application.ex +++ b/lib/realtime/application.ex @@ -22,7 +22,7 @@ defmodule Realtime.Application do :ok = :logger.set_primary_config( :metadata, - Enum.into([region: System.get_env("REGION")], primary_config.metadata) + Enum.into([region: System.get_env("REGION"), cluster: System.get_env("CLUSTER")], primary_config.metadata) ) topologies = Application.get_env(:libcluster, :topologies) || [] @@ -43,18 +43,20 @@ defmodule Realtime.Application do {Realtime.SignalHandler, %{handler_mod: :erl_signal_handler}} ) - Realtime.PromEx.set_metrics_tags() :ets.new(Realtime.Tenants.Connect, [:named_table, :set, :public]) :syn.set_event_handler(Realtime.SynHandler) - - :ok = :syn.add_node_to_scopes([:users, RegionNodes, Realtime.Tenants.Connect]) + :ok = 
:syn.add_node_to_scopes([RegionNodes, Realtime.Tenants.Connect | Realtime.UsersCounter.scopes()]) region = Application.get_env(:realtime, :region) - :syn.join(RegionNodes, region, self(), node: node()) - + broadcast_pool_size = Application.get_env(:realtime, :broadcast_pool_size, 10) migration_partition_slots = Application.get_env(:realtime, :migration_partition_slots) connect_partition_slots = Application.get_env(:realtime, :connect_partition_slots) no_channel_timeout_in_ms = Application.get_env(:realtime, :no_channel_timeout_in_ms) + master_region = Application.get_env(:realtime, :master_region) || region + user_scope_shards = Application.fetch_env!(:realtime, :users_scope_shards) + user_scope_broadast_interval_in_ms = Application.get_env(:realtime, :users_scope_broadcast_interval_in_ms, 10_000) + + :syn.join(RegionNodes, region, self(), node: node()) children = [ @@ -62,10 +64,19 @@ defmodule Realtime.Application do Realtime.GenCounter, Realtime.PromEx, {Realtime.Telemetry.Logger, handler_id: "telemetry-logger"}, - Realtime.Repo, RealtimeWeb.Telemetry, {Cluster.Supervisor, [topologies, [name: Realtime.ClusterSupervisor]]}, - {Phoenix.PubSub, name: Realtime.PubSub, pool_size: 10}, + {Phoenix.PubSub, + name: Realtime.PubSub, pool_size: 10, adapter: pubsub_adapter(), broadcast_pool_size: broadcast_pool_size}, + {Beacon, + [ + :users, + [ + partitions: user_scope_shards, + broadcast_interval_in_ms: user_scope_broadast_interval_in_ms, + message_module: Realtime.BeaconPubSubAdapter + ] + ]}, {Cachex, name: Realtime.RateCounter}, Realtime.Tenants.Cache, Realtime.RateCounter.DynamicSupervisor, @@ -98,11 +109,9 @@ defmodule Realtime.Application do RealtimeWeb.Presence ] ++ extensions_supervisors() ++ janitor_tasks() - children = - case Replica.replica() do - Realtime.Repo -> children - replica -> List.insert_at(children, 2, replica) - end + database_connections = if master_region == region, do: [Realtime.Repo], else: [Replica.replica()] + + children = 
database_connections ++ children # See https://hexdocs.pm/elixir/Supervisor.html # for other strategies and supported options @@ -152,4 +161,12 @@ defmodule Realtime.Application do OpentelemetryPhoenix.setup(adapter: :cowboy2) OpentelemetryEcto.setup([:realtime, :repo], db_statement: :enabled) end + + defp pubsub_adapter do + if Application.fetch_env!(:realtime, :pubsub_adapter) == :gen_rpc do + Realtime.GenRpcPubSub + else + Phoenix.PubSub.PG2 + end + end end diff --git a/lib/realtime/beacon_pub_sub_adapter.ex b/lib/realtime/beacon_pub_sub_adapter.ex new file mode 100644 index 000000000..f4b551f6d --- /dev/null +++ b/lib/realtime/beacon_pub_sub_adapter.ex @@ -0,0 +1,33 @@ +defmodule Realtime.BeaconPubSubAdapter do + @moduledoc "Beacon adapter to use PubSub" + + import Kernel, except: [send: 2] + + @behaviour Beacon.Adapter + + @impl true + def register(scope) do + :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, topic(scope)) + end + + @impl true + def broadcast(scope, message) do + Phoenix.PubSub.broadcast_from(Realtime.PubSub, self(), topic(scope), message) + end + + @impl true + def broadcast(scope, _nodes, message) do + # Notice here that we don't filter by nodes, as PubSub broadcasts to all subscribers + # We are broadcasting to everyone because we want to use the fact that Realtime.PubSub uses + # regional broadcasting which is more efficient in this multi-region setup + + broadcast(scope, message) + end + + @impl true + def send(scope, node, message) do + Phoenix.PubSub.direct_broadcast(node, Realtime.PubSub, topic(scope), message) + end + + defp topic(scope), do: "beacon:#{scope}" +end diff --git a/lib/realtime/context_cache.ex b/lib/realtime/context_cache.ex deleted file mode 100644 index afacf4ce1..000000000 --- a/lib/realtime/context_cache.ex +++ /dev/null @@ -1,21 +0,0 @@ -defmodule Realtime.ContextCache do - @moduledoc """ - Read through cache for hot database paths. 
- """ - - require Logger - - def apply_fun(context, {fun, arity}, args) do - cache = cache_name(context) - cache_key = {{fun, arity}, args} - - case Cachex.fetch(cache, cache_key, fn {{_fun, _arity}, args} -> {:commit, {:cached, apply(context, fun, args)}} end) do - {:commit, {:cached, value}} -> value - {:ok, {:cached, value}} -> value - end - end - - defp cache_name(context) do - Module.concat(context, Cache) - end -end diff --git a/lib/realtime/database.ex b/lib/realtime/database.ex index ec663c7e0..2a59730ec 100644 --- a/lib/realtime/database.ex +++ b/lib/realtime/database.ex @@ -246,9 +246,9 @@ defmodule Realtime.Database do @spec pool_size_by_application_name(binary(), map() | nil) :: non_neg_integer() def pool_size_by_application_name(application_name, settings) do case application_name do - "realtime_subscription_manager" -> settings["subcriber_pool_size"] || 1 + "realtime_subscription_manager" -> 1 "realtime_subscription_manager_pub" -> settings["subs_pool_size"] || 1 - "realtime_subscription_checker" -> settings["subs_pool_size"] || 1 + "realtime_subscription_checker" -> 1 "realtime_connect" -> settings["db_pool"] || 1 "realtime_health_check" -> 1 "realtime_janitor" -> 1 diff --git a/lib/realtime/gen_rpc.ex b/lib/realtime/gen_rpc.ex index bb7099242..4d931af23 100644 --- a/lib/realtime/gen_rpc.ex +++ b/lib/realtime/gen_rpc.ex @@ -10,6 +10,52 @@ defmodule Realtime.GenRpc do @type result :: any | {:error, :rpc_error, reason :: any} + @doc """ + Broadcasts the message `msg` asynchronously to the registered process `name` on the specified `nodes`. 
+ + Options: + + - `:key` - Optional key to consistently select the same gen_rpc clients to guarantee message order between nodes + """ + @spec abcast([node], atom, any, keyword()) :: :ok + def abcast(nodes, name, msg, opts) when is_list(nodes) and is_atom(name) and is_list(opts) do + key = Keyword.get(opts, :key, nil) + nodes = rpc_nodes(nodes, key) + + :gen_rpc.abcast(nodes, name, msg) + :ok + end + + @doc """ + Fire and forget apply(mod, func, args) on one node + + Options: + + - `:key` - Optional key to consistently select the same gen_rpc client to guarantee some message order between nodes + """ + @spec cast(node, module, atom, list(any), keyword()) :: :ok + def cast(node, mod, func, args, opts \\ []) + + # Local + def cast(node, mod, func, args, _opts) when node == node() do + :erpc.cast(node, mod, func, args) + :ok + end + + def cast(node, mod, func, args, opts) + when is_atom(node) and is_atom(mod) and is_atom(func) and is_list(args) and is_list(opts) do + key = Keyword.get(opts, :key, nil) + + # Ensure this node is part of the connected nodes + if node in Node.list() do + node_key = rpc_node(node, key) + + :gen_rpc.cast(node_key, mod, func, args) + end + + :ok + end + @doc """ Fire and forget apply(mod, func, args) on all nodes @@ -35,12 +81,29 @@ defmodule Realtime.GenRpc do Options: - `:key` - Optional key to consistently select the same gen_rpc clients to guarantee message order between nodes - - `:tenant_id` - Tenant ID for telemetry and logging, defaults to nil + - `:tenant_id` - Tenant ID for logging, defaults to nil - `:timeout` - timeout in milliseconds for the RPC call, defaults to 5000ms """ @spec call(node, module, atom, list(any), keyword()) :: result def call(node, mod, func, args, opts) when is_atom(node) and is_atom(mod) and is_atom(func) and is_list(args) and is_list(opts) do + if node == node() or node in Node.list() do + do_call(node, mod, func, args, opts) + else + tenant_id = Keyword.get(opts, :tenant_id) + + log_error( + 
"ErrorOnRpcCall", + %{target: node, mod: mod, func: func, error: :badnode}, + project: tenant_id, + external_id: tenant_id + ) + + {:error, :rpc_error, :badnode} + end + end + + defp do_call(node, mod, func, args, opts) do timeout = Keyword.get(opts, :timeout, default_rpc_timeout()) tenant_id = Keyword.get(opts, :tenant_id) key = Keyword.get(opts, :key, nil) @@ -57,16 +120,16 @@ defmodule Realtime.GenRpc do external_id: tenant_id ) - telemetry_failure(node, latency, tenant_id) + telemetry_failure(node, latency) {:error, :rpc_error, reason} {:error, _} -> - telemetry_failure(node, latency, tenant_id) + telemetry_failure(node, latency) response _ -> - telemetry_success(node, latency, tenant_id) + telemetry_success(node, latency) response end end @@ -121,32 +184,32 @@ defmodule Realtime.GenRpc do external_id: tenant_id ) - telemetry_failure(node, latency, tenant_id) + telemetry_failure(node, latency) {node, result} {node, latency, {:ok, _} = result} -> - telemetry_success(node, latency, tenant_id) + telemetry_success(node, latency) {node, result} {node, latency, result} -> - telemetry_failure(node, latency, tenant_id) + telemetry_failure(node, latency) {node, result} end) end - defp telemetry_success(node, latency, tenant_id) do + defp telemetry_success(node, latency) do Telemetry.execute( [:realtime, :rpc], %{latency: latency}, - %{origin_node: node(), target_node: node, success: true, tenant: tenant_id, mechanism: :gen_rpc} + %{origin_node: node(), target_node: node, success: true, mechanism: :gen_rpc} ) end - defp telemetry_failure(node, latency, tenant_id) do + defp telemetry_failure(node, latency) do Telemetry.execute( [:realtime, :rpc], %{latency: latency}, - %{origin_node: node(), target_node: node, success: false, tenant: tenant_id, mechanism: :gen_rpc} + %{origin_node: node(), target_node: node, success: false, mechanism: :gen_rpc} ) end diff --git a/lib/realtime/gen_rpc/pub_sub.ex b/lib/realtime/gen_rpc/pub_sub.ex new file mode 100644 index 
000000000..2a531b77d --- /dev/null +++ b/lib/realtime/gen_rpc/pub_sub.ex @@ -0,0 +1,131 @@ +defmodule Realtime.GenRpcPubSub do + @moduledoc """ + gen_rpc Phoenix.PubSub adapter + """ + + @behaviour Phoenix.PubSub.Adapter + alias Realtime.GenRpc + alias Realtime.GenRpcPubSub.Worker + alias Realtime.Nodes + use Supervisor + + @impl true + def node_name(_), do: node() + + # Supervisor callbacks + + def start_link(opts) do + adapter_name = Keyword.fetch!(opts, :adapter_name) + name = Keyword.fetch!(opts, :name) + pool_size = Keyword.get(opts, :pool_size, 1) + broadcast_pool_size = Keyword.get(opts, :broadcast_pool_size, pool_size) + + Supervisor.start_link(__MODULE__, {adapter_name, name, broadcast_pool_size}, + name: :"#{name}#{adapter_name}_supervisor" + ) + end + + @impl true + def init({adapter_name, pubsub, pool_size}) do + workers = for number <- 1..pool_size, do: :"#{pubsub}#{adapter_name}_#{number}" + + :persistent_term.put(adapter_name, List.to_tuple(workers)) + + children = + for worker <- workers do + Supervisor.child_spec({Realtime.GenRpcPubSub.Worker, {pubsub, worker}}, id: worker) + end + + Supervisor.init(children, strategy: :one_for_one) + end + + defp worker_name(adapter_name, key) do + workers = :persistent_term.get(adapter_name) + elem(workers, :erlang.phash2(key, tuple_size(workers))) + end + + @impl true + def broadcast(adapter_name, topic, message, dispatcher) do + worker = worker_name(adapter_name, self()) + + if Application.get_env(:realtime, :regional_broadcasting, false) do + my_region = Application.get_env(:realtime, :region) + # broadcast to all other nodes in the region + + other_nodes = for node <- Realtime.Nodes.region_nodes(my_region), node != node(), do: node + GenRpc.abcast(other_nodes, worker, Worker.forward_to_local(topic, message, dispatcher), key: self()) + + # send a message to a node in each region to forward to the rest of the region + other_region_nodes = nodes_from_other_regions(my_region, self()) + + 
GenRpc.abcast(other_region_nodes, worker, Worker.forward_to_region(topic, message, dispatcher), key: self()) + else + GenRpc.abcast(Node.list(), worker, Worker.forward_to_local(topic, message, dispatcher), key: self()) + end + + :ok + end + + defp nodes_from_other_regions(my_region, key) do + Enum.flat_map(Nodes.all_node_regions(), fn + ^my_region -> + [] + + region -> + case Nodes.node_from_region(region, key) do + {:ok, node} -> [node] + _ -> [] + end + end) + end + + @impl true + def direct_broadcast(adapter_name, node_name, topic, message, dispatcher) do + worker = worker_name(adapter_name, self()) + GenRpc.abcast([node_name], worker, Worker.forward_to_local(topic, message, dispatcher), key: self()) + end +end + +defmodule Realtime.GenRpcPubSub.Worker do + @moduledoc false + use GenServer + + def forward_to_local(topic, message, dispatcher), do: {:ftl, topic, message, dispatcher} + def forward_to_region(topic, message, dispatcher), do: {:ftr, topic, message, dispatcher} + + @doc false + def start_link({pubsub, worker}), do: GenServer.start_link(__MODULE__, {pubsub, worker}, name: worker) + + @impl true + def init({pubsub, worker}) do + Process.flag(:message_queue_data, :off_heap) + Process.flag(:fullsweep_after, 20) + {:ok, {pubsub, worker}} + end + + @impl true + # Forward to local + def handle_info({:ftl, topic, message, dispatcher}, {pubsub, worker}) do + Phoenix.PubSub.local_broadcast(pubsub, topic, message, dispatcher) + {:noreply, {pubsub, worker}} + end + + # Forward to the rest of the region + def handle_info({:ftr, topic, message, dispatcher}, {pubsub, worker}) do + # Forward to local first + Phoenix.PubSub.local_broadcast(pubsub, topic, message, dispatcher) + + # Then broadcast to the rest of my region + my_region = Application.get_env(:realtime, :region) + other_nodes = for node <- Realtime.Nodes.region_nodes(my_region), node != node(), do: node + + if other_nodes != [] do + Realtime.GenRpc.abcast(other_nodes, worker, forward_to_local(topic, message, 
dispatcher), []) + end + + {:noreply, {pubsub, worker}} + end + + @impl true + def handle_info(_, pubsub), do: {:noreply, pubsub} +end diff --git a/lib/realtime/messages.ex b/lib/realtime/messages.ex index c6d571db7..e209461a2 100644 --- a/lib/realtime/messages.ex +++ b/lib/realtime/messages.ex @@ -3,6 +3,69 @@ defmodule Realtime.Messages do Handles `realtime.messages` table operations """ + alias Realtime.Api.Message + + import Ecto.Query, only: [from: 2] + + @hard_limit 25 + @default_timeout 5_000 + + @doc """ + Fetch last `limit ` messages for a given `topic` inserted after `since` + + Automatically uses RPC if the database connection is not in the same node + + Only allowed for private channels + """ + @spec replay(pid, String.t(), String.t(), non_neg_integer, non_neg_integer) :: + {:ok, Message.t(), [String.t()]} | {:error, term} | {:error, :rpc_error, term} + def replay(conn, tenant_id, topic, since, limit) + when node(conn) == node() and is_integer(since) and is_integer(limit) do + limit = max(min(limit, @hard_limit), 1) + + with {:ok, since} <- DateTime.from_unix(since, :millisecond), + {:ok, messages} <- messages(conn, tenant_id, topic, since, limit) do + {:ok, Enum.reverse(messages), MapSet.new(messages, & &1.id)} + else + {:error, :postgrex_exception} -> {:error, :failed_to_replay_messages} + {:error, :invalid_unix_time} -> {:error, :invalid_replay_params} + error -> error + end + end + + def replay(conn, tenant_id, topic, since, limit) when is_integer(since) and is_integer(limit) do + Realtime.GenRpc.call(node(conn), __MODULE__, :replay, [conn, tenant_id, topic, since, limit], + key: topic, + tenant_id: tenant_id + ) + end + + def replay(_, _, _, _, _), do: {:error, :invalid_replay_params} + + defp messages(conn, tenant_id, topic, since, limit) do + since = DateTime.to_naive(since) + # We want to avoid searching partitions in the future as they should be empty + # so we limit to 1 minute in the future to account for any potential drift + now = 
NaiveDateTime.utc_now() |> NaiveDateTime.add(1, :minute) + + query = + from m in Message, + where: + m.topic == ^topic and + m.private == true and + m.extension == :broadcast and + m.inserted_at >= ^since and + m.inserted_at < ^now, + limit: ^limit, + order_by: [desc: m.inserted_at] + + {latency, value} = + :timer.tc(Realtime.Tenants.Repo, :all, [conn, query, Message, [timeout: @default_timeout]], :millisecond) + + :telemetry.execute([:realtime, :tenants, :replay], %{latency: latency}, %{tenant: tenant_id}) + value + end + @doc """ Deletes messages older than 72 hours for a given tenant connection """ diff --git a/lib/realtime/metrics_cleaner.ex b/lib/realtime/metrics_cleaner.ex index 773fb4c86..20fd586d6 100644 --- a/lib/realtime/metrics_cleaner.ex +++ b/lib/realtime/metrics_cleaner.ex @@ -18,7 +18,7 @@ defmodule Realtime.MetricsCleaner do def handle_info(:check, %{interval: interval} = state) do Process.cancel_timer(state.check_ref) - {exec_time, _} = :timer.tc(fn -> loop_and_cleanup_metrics_table() end) + {exec_time, _} = :timer.tc(fn -> loop_and_cleanup_metrics_table() end, :millisecond) if exec_time > :timer.seconds(5), do: Logger.warning("Metrics check took: #{exec_time} ms") @@ -31,33 +31,20 @@ defmodule Realtime.MetricsCleaner do {:noreply, state} end - defp check(interval) do - Process.send_after(self(), :check, interval) - end + defp check(interval), do: Process.send_after(self(), :check, interval) + + @peep_filter_spec [{{{:_, %{tenant: :"$1"}}, :_}, [{:is_binary, :"$1"}], [:"$1"]}] - @table_name :"syn_registry_by_name_Elixir.Realtime.Tenants.Connect" - @metrics_table Realtime.PromEx.Metrics - @filter_spec [{{{:_, %{tenant: :"$1"}}, :_}, [], [:"$1"]}] - @tenant_id_spec [{{:"$1", :_, :_, :_, :_, :_}, [], [:"$1"]}] defp loop_and_cleanup_metrics_table do - tenant_ids = :ets.select(@table_name, @tenant_id_spec) + tenant_ids = Realtime.Tenants.Connect.list_tenants() |> MapSet.new() - :ets.select(@metrics_table, @filter_spec) - |> Enum.uniq() - |> 
Enum.reject(fn tenant_id -> tenant_id in tenant_ids end) - |> Enum.each(fn tenant_id -> delete_metric(tenant_id) end) - end + {_, {tid, _}} = Peep.Persistent.storage(Realtime.PromEx.Metrics) - @doc """ - Deletes all metrics that contain the given tenant or database_host. - """ - @spec delete_metric(String.t()) :: :ok - def delete_metric(tenant) do - :ets.select_delete(@metrics_table, [ - {{{:_, %{tenant: tenant}}, :_}, [], [true]}, - {{{:_, %{database_host: "db.#{tenant}.supabase.co"}}, :_}, [], [true]} - ]) - - :ok + tid + |> :ets.select(@peep_filter_spec) + |> Enum.uniq() + |> Stream.reject(fn tenant_id -> MapSet.member?(tenant_ids, tenant_id) end) + |> Enum.map(fn tenant_id -> %{tenant: tenant_id} end) + |> then(&Peep.prune_tags(Realtime.PromEx.Metrics, &1)) end end diff --git a/lib/realtime/monitoring/erl_sys_mon.ex b/lib/realtime/monitoring/erl_sys_mon.ex index 32a4f857b..3278886d6 100644 --- a/lib/realtime/monitoring/erl_sys_mon.ex +++ b/lib/realtime/monitoring/erl_sys_mon.ex @@ -10,8 +10,8 @@ defmodule Realtime.ErlSysMon do @defaults [ :busy_dist_port, :busy_port, - {:long_gc, 250}, - {:long_schedule, 100}, + {:long_gc, 500}, + {:long_schedule, 500}, {:long_message_queue, {0, 1_000}} ] @@ -24,8 +24,36 @@ defmodule Realtime.ErlSysMon do {:ok, []} end + def handle_info({:monitor, pid, _type, _meta} = msg, state) when is_pid(pid) do + log_process_info(msg, pid) + {:noreply, state} + end + def handle_info(msg, state) do - Logger.error("#{__MODULE__} message: " <> inspect(msg)) + Logger.warning("#{__MODULE__} message: " <> inspect(msg)) {:noreply, state} end + + defp log_process_info(msg, pid) do + pid_info = + pid + |> Process.info(:dictionary) + |> case do + {:dictionary, dict} when is_list(dict) -> + {List.keyfind(dict, :"$initial_call", 0), List.keyfind(dict, :"$ancestors", 0)} + + other -> + other + end + + extra_info = Process.info(pid, [:registered_name, :message_queue_len, :total_heap_size]) + + Logger.warning( + "#{__MODULE__} message: " <> + 
inspect(msg) <> "|\n process info: #{inspect(pid_info)} #{inspect(extra_info)}" + ) + rescue + _ -> + Logger.warning("#{__MODULE__} message: " <> inspect(msg)) + end end diff --git a/lib/realtime/monitoring/latency.ex b/lib/realtime/monitoring/latency.ex index 52c46adb4..d9ddd0d9a 100644 --- a/lib/realtime/monitoring/latency.ex +++ b/lib/realtime/monitoring/latency.ex @@ -7,7 +7,7 @@ defmodule Realtime.Latency do use Realtime.Logs alias Realtime.Nodes - alias Realtime.Rpc + alias Realtime.GenRpc defmodule Payload do @moduledoc false @@ -33,7 +33,7 @@ defmodule Realtime.Latency do } end - @every 5_000 + @every 15_000 def start_link(args) do GenServer.start_link(__MODULE__, args, name: __MODULE__) end @@ -76,7 +76,7 @@ defmodule Realtime.Latency do Task.Supervisor.async(Realtime.TaskSupervisor, fn -> {latency, response} = :timer.tc(fn -> - Rpc.call(n, __MODULE__, :pong, [pong_timeout], timeout: timer_timeout) + GenRpc.call(n, __MODULE__, :pong, [pong_timeout], timeout: timer_timeout) end) latency_ms = latency / 1_000 @@ -85,7 +85,7 @@ defmodule Realtime.Latency do from_node = Nodes.short_node_id_from_name(Node.self()) case response do - {:badrpc, reason} -> + {:error, :rpc_error, reason} -> log_error( "RealtimeNodeDisconnected", "Unable to connect to #{short_name} from #{region}: #{reason}" diff --git a/lib/realtime/monitoring/prom_ex.ex b/lib/realtime/monitoring/prom_ex.ex index 9c0db0d87..f04d5ef01 100644 --- a/lib/realtime/monitoring/prom_ex.ex +++ b/lib/realtime/monitoring/prom_ex.ex @@ -1,5 +1,4 @@ defmodule Realtime.PromEx do - alias Realtime.Nodes alias Realtime.PromEx.Plugins.Channels alias Realtime.PromEx.Plugins.Distributed alias Realtime.PromEx.Plugins.GenRpc @@ -65,6 +64,29 @@ defmodule Realtime.PromEx do alias PromEx.Plugins + defmodule Store do + @moduledoc false + # Custom store to set global tags + + @behaviour PromEx.Storage + + @impl true + def scrape(name) do + Peep.get_all_metrics(name) + |> Realtime.Monitoring.Prometheus.export() + end + + @impl 
true + def child_spec(name, metrics) do + Peep.child_spec( + name: name, + metrics: metrics, + global_tags: Application.get_env(:realtime, :metrics_tags, %{}), + storage: {:default, 4} + ) + end + end + @impl true def plugins do poll_rate = Application.get_env(:realtime, :prom_poll_rate) @@ -105,55 +127,11 @@ defmodule Realtime.PromEx do end def get_metrics do - %{ - region: region, - node_host: node_host, - short_alloc_id: short_alloc_id - } = get_metrics_tags() - - def_tags = "host=\"#{node_host}\",region=\"#{region}\",id=\"#{short_alloc_id}\"" - - metrics = - PromEx.get_metrics(Realtime.PromEx) - |> String.split("\n") - |> Enum.map_join("\n", fn line -> - case Regex.run(~r/(?!\#)^(\w+)(?:{(.*?)})?\s*(.+)$/, line) do - nil -> - line - - [_, key, tags, value] -> - tags = if tags == "", do: def_tags, else: tags <> "," <> def_tags - - "#{key}{#{tags}} #{value}" - end - end) + metrics = PromEx.get_metrics(Realtime.PromEx) Realtime.PromEx.__ets_cron_flusher_name__() |> PromEx.ETSCronFlusher.defer_ets_flush() metrics end - - @doc "Compressed metrics using :zlib.compress/1" - @spec get_compressed_metrics() :: binary() - def get_compressed_metrics do - get_metrics() - |> :zlib.compress() - end - - def set_metrics_tags do - [_, node_host] = node() |> Atom.to_string() |> String.split("@") - - metrics_tags = %{ - region: Application.get_env(:realtime, :region), - node_host: node_host, - short_alloc_id: Nodes.short_node_id_from_name(node()) - } - - Application.put_env(:realtime, :metrics_tags, metrics_tags) - end - - def get_metrics_tags do - Application.get_env(:realtime, :metrics_tags) - end end diff --git a/lib/realtime/monitoring/prom_ex/plugins/distributed.ex b/lib/realtime/monitoring/prom_ex/plugins/distributed.ex index 060f28036..927b8ac88 100644 --- a/lib/realtime/monitoring/prom_ex/plugins/distributed.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/distributed.ex @@ -70,7 +70,8 @@ defmodule Realtime.PromEx.Plugins.Distributed do measurement: :size, tags: 
[:origin_node, :target_node] ) - ] + ], + detach_on_error: false ) end diff --git a/lib/realtime/monitoring/prom_ex/plugins/gen_rpc.ex b/lib/realtime/monitoring/prom_ex/plugins/gen_rpc.ex index a4542a889..59e32c4ef 100644 --- a/lib/realtime/monitoring/prom_ex/plugins/gen_rpc.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/gen_rpc.ex @@ -71,7 +71,8 @@ defmodule Realtime.PromEx.Plugins.GenRpc do measurement: :size, tags: [:origin_node, :target_node] ) - ] + ], + detach_on_error: false ) end diff --git a/lib/realtime/monitoring/prom_ex/plugins/osmon.ex b/lib/realtime/monitoring/prom_ex/plugins/osmon.ex index 67d1fcb71..9bd15fb91 100644 --- a/lib/realtime/monitoring/prom_ex/plugins/osmon.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/osmon.ex @@ -57,7 +57,8 @@ defmodule Realtime.PromEx.Plugins.OsMon do description: "The average system load in the last 15 minutes.", measurement: :avg15 ) - ] + ], + detach_on_error: false ) end diff --git a/lib/realtime/monitoring/prom_ex/plugins/phoenix.ex b/lib/realtime/monitoring/prom_ex/plugins/phoenix.ex index d3f64afbe..7e8828b63 100644 --- a/lib/realtime/monitoring/prom_ex/plugins/phoenix.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/phoenix.ex @@ -51,26 +51,27 @@ if Code.ensure_loaded?(Phoenix) do description: "The total open connections to ranch.", measurement: :active ) - ] + ], + detach_on_error: false ) end def execute_metrics do active_conn = - case :ets.lookup(:ranch_server, {:listener_sup, HTTP}) do - [] -> - -1 - - _ -> - HTTP - |> :ranch_server.get_connections_sup() - |> :supervisor.count_children() - |> Keyword.get(:active) + if :ranch.info()[HTTP] do + :ranch.info(HTTP)[:all_connections] + else + -1 end :telemetry.execute(@event_all_connections, %{active: active_conn}, %{}) end + defmodule Buckets do + @moduledoc false + use Peep.Buckets.Custom, buckets: [10, 100, 500, 1_000, 5_000, 10_000] + end + defp channel_events(metric_prefix) do Event.build( :phoenix_channel_event_metrics, @@ -99,9 +100,7 @@ if 
Code.ensure_loaded?(Phoenix) do event_name: [:phoenix, :channel_handled_in], measurement: :duration, description: "The time it takes for the application to respond to channel messages.", - reporter_options: [ - buckets: [10, 100, 500, 1_000, 5_000, 10_000] - ], + reporter_options: [peep_bucket_calculator: Buckets], tag_values: fn %{socket: %Socket{endpoint: endpoint}} -> %{ endpoint: normalize_module_name(endpoint) @@ -124,17 +123,16 @@ if Code.ensure_loaded?(Phoenix) do event_name: [:phoenix, :socket_connected], measurement: :duration, description: "The time it takes for the application to establish a socket connection.", - reporter_options: [ - buckets: [10, 100, 500, 1_000, 5_000, 10_000] - ], - tag_values: fn %{result: result, endpoint: endpoint, transport: transport} -> + reporter_options: [peep_bucket_calculator: Buckets], + tag_values: fn %{result: result, endpoint: endpoint, transport: transport, serializer: serializer} -> %{ transport: transport, result: result, - endpoint: normalize_module_name(endpoint) + endpoint: normalize_module_name(endpoint), + serializer: serializer } end, - tags: [:result, :transport, :endpoint], + tags: [:result, :transport, :endpoint, :serializer], unit: {:native, :millisecond} ) ] diff --git a/lib/realtime/monitoring/prom_ex/plugins/tenant.ex b/lib/realtime/monitoring/prom_ex/plugins/tenant.ex index 1bd324624..485be55c6 100644 --- a/lib/realtime/monitoring/prom_ex/plugins/tenant.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/tenant.ex @@ -22,11 +22,16 @@ defmodule Realtime.PromEx.Plugins.Tenant do [ channel_events(), replication_metrics(), - subscription_metrics(), payload_size_metrics() ] end + defmodule PayloadSize.Buckets do + @moduledoc false + use Peep.Buckets.Custom, + buckets: [250, 500, 1000, 3000, 5000, 10_000, 25_000, 100_000, 500_000, 1_000_000, 3_000_000] + end + defp payload_size_metrics do Event.build( :realtime_tenant_payload_size_metrics, @@ -36,21 +41,18 @@ defmodule Realtime.PromEx.Plugins.Tenant do 
event_name: [:realtime, :tenants, :payload, :size], measurement: :size, description: "Tenant payload size", - tags: [:tenant], + tags: [:tenant, :message_type], unit: :byte, - reporter_options: [ - buckets: [100, 250, 500, 1000, 2000, 3000, 5000, 10_000, 25_000] - ] + reporter_options: [peep_bucket_calculator: PayloadSize.Buckets] ), distribution( [:realtime, :payload, :size], event_name: [:realtime, :tenants, :payload, :size], measurement: :size, description: "Payload size", + tags: [:message_type], unit: :byte, - reporter_options: [ - buckets: [100, 250, 500, 1000, 2000, 3000, 5000, 10_000, 25_000] - ] + reporter_options: [peep_bucket_calculator: PayloadSize.Buckets] ) ] ) @@ -75,36 +77,40 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "The cluster total count of connected clients for a tenant.", measurement: :connected_cluster, tags: [:tenant] - ), - last_value( - [:realtime, :connections, :limit_concurrent], - event_name: [:realtime, :connections], - description: "The total count of connected clients for a tenant.", - measurement: :limit, - tags: [:tenant] ) - ] + ], + detach_on_error: false ) end def execute_tenant_metrics do tenants = Tenants.list_connected_tenants(Node.self()) + cluster_counts = UsersCounter.tenant_counts() + node_counts = UsersCounter.tenant_counts(Node.self()) for t <- tenants do - count = UsersCounter.tenant_users(Node.self(), t) - cluster_count = UsersCounter.tenant_users(t) tenant = Tenants.Cache.get_tenant_by_external_id(t) if tenant != nil do Telemetry.execute( [:realtime, :connections], - %{connected: count, connected_cluster: cluster_count, limit: tenant.max_concurrent_users}, + %{ + connected: Map.get(node_counts, t, 0), + connected_cluster: Map.get(cluster_counts, t, 0), + limit: tenant.max_concurrent_users + }, %{tenant: t} ) end end end + defmodule Replication.Buckets do + @moduledoc false + use Peep.Buckets.Custom, + buckets: [250, 500, 1000, 3000, 5000, 10_000, 25_000, 100_000, 500_000, 1_000_000, 3_000_000] + end 
+ defp replication_metrics do Event.build( :realtime_tenant_replication_event_metrics, @@ -116,34 +122,25 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "Duration of the logical replication slot polling query for Realtime RLS.", tags: [:tenant], unit: {:microsecond, :millisecond}, - reporter_options: [ - buckets: [125, 250, 500, 1_000, 2_000, 4_000, 8_000, 16_000] - ] + reporter_options: [peep_bucket_calculator: Replication.Buckets] ) ] ) end - defp subscription_metrics do - Event.build( - :realtime_tenant_channel_event_metrics, - [ - sum( - [:realtime, :subscriptions_checker, :pid_not_found], - event_name: [:realtime, :subscriptions_checker, :pid_not_found], - measurement: :sum, - description: "Sum of pids not found in Subscription tables.", - tags: [:tenant] - ), - sum( - [:realtime, :subscriptions_checker, :phantom_pid_detected], - event_name: [:realtime, :subscriptions_checker, :phantom_pid_detected], - measurement: :sum, - description: "Sum of phantom pids detected in Subscription tables.", - tags: [:tenant] - ) - ] - ) + defmodule PolicyAuthorization.Buckets do + @moduledoc false + use Peep.Buckets.Custom, buckets: [10, 250, 5000, 15_000] + end + + defmodule BroadcastFromDatabase.Buckets do + @moduledoc false + use Peep.Buckets.Custom, buckets: [10, 250, 5000] + end + + defmodule Replay.Buckets do + @moduledoc false + use Peep.Buckets.Custom, buckets: [10, 250, 5000, 15_000] end defp channel_events do @@ -157,6 +154,12 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "Sum of messages sent on a Realtime Channel.", tags: [:tenant] ), + sum( + [:realtime, :channel, :global, :events], + event_name: [:realtime, :rate_counter, :channel, :events], + measurement: :sum, + description: "Global sum of messages sent on a Realtime Channel." 
+ ), sum( [:realtime, :channel, :presence_events], event_name: [:realtime, :rate_counter, :channel, :presence_events], @@ -164,6 +167,12 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "Sum of presence messages sent on a Realtime Channel.", tags: [:tenant] ), + sum( + [:realtime, :channel, :global, :presence_events], + event_name: [:realtime, :rate_counter, :channel, :presence_events], + measurement: :sum, + description: "Global sum of presence messages sent on a Realtime Channel." + ), sum( [:realtime, :channel, :db_events], event_name: [:realtime, :rate_counter, :channel, :db_events], @@ -171,6 +180,12 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "Sum of db messages sent on a Realtime Channel.", tags: [:tenant] ), + sum( + [:realtime, :channel, :global, :db_events], + event_name: [:realtime, :rate_counter, :channel, :db_events], + measurement: :sum, + description: "Global sum of db messages sent on a Realtime Channel." + ), sum( [:realtime, :channel, :joins], event_name: [:realtime, :rate_counter, :channel, :joins], @@ -178,18 +193,18 @@ defmodule Realtime.PromEx.Plugins.Tenant do description: "Sum of Realtime Channel joins.", tags: [:tenant] ), - last_value( - [:realtime, :channel, :events, :limit_per_second], - event_name: [:realtime, :rate_counter, :channel, :events], - measurement: :limit, - description: "Rate limit of messages per second sent on a Realtime Channel.", + sum( + [:realtime, :channel, :input_bytes], + event_name: [:realtime, :channel, :input_bytes], + description: "Sum of input bytes sent on sockets.", + measurement: :size, tags: [:tenant] ), - last_value( - [:realtime, :channel, :joins, :limit_per_second], - event_name: [:realtime, :rate_counter, :channel, :joins], - measurement: :limit, - description: "Rate limit of joins per second on a Realtime Channel.", + sum( + [:realtime, :channel, :output_bytes], + event_name: [:realtime, :channel, :output_bytes], + description: "Sum of output bytes sent on sockets.", + 
measurement: :size, tags: [:tenant] ), distribution( @@ -199,7 +214,7 @@ defmodule Realtime.PromEx.Plugins.Tenant do unit: :millisecond, description: "Latency of read authorization checks.", tags: [:tenant], - reporter_options: [buckets: [10, 250, 5000, 15_000]] + reporter_options: [peep_bucket_calculator: PolicyAuthorization.Buckets] ), distribution( [:realtime, :tenants, :write_authorization_check], @@ -208,7 +223,7 @@ defmodule Realtime.PromEx.Plugins.Tenant do unit: :millisecond, description: "Latency of write authorization checks.", tags: [:tenant], - reporter_options: [buckets: [10, 250, 5000, 15_000]] + reporter_options: [peep_bucket_calculator: PolicyAuthorization.Buckets] ), distribution( [:realtime, :tenants, :broadcast_from_database, :latency_committed_at], @@ -217,16 +232,25 @@ defmodule Realtime.PromEx.Plugins.Tenant do unit: :millisecond, description: "Latency of database transaction start until reaches server to be broadcasted", tags: [:tenant], - reporter_options: [buckets: [10, 250, 5000]] + reporter_options: [peep_bucket_calculator: BroadcastFromDatabase.Buckets] ), distribution( [:realtime, :tenants, :broadcast_from_database, :latency_inserted_at], event_name: [:realtime, :tenants, :broadcast_from_database], measurement: :latency_inserted_at, - unit: :second, + unit: {:microsecond, :millisecond}, description: "Latency of database inserted_at until reaches server to be broadcasted", tags: [:tenant], - reporter_options: [buckets: [1, 2, 5]] + reporter_options: [peep_bucket_calculator: BroadcastFromDatabase.Buckets] + ), + distribution( + [:realtime, :tenants, :replay], + event_name: [:realtime, :tenants, :replay], + measurement: :latency, + unit: :millisecond, + description: "Latency of broadcast replay", + tags: [:tenant], + reporter_options: [peep_bucket_calculator: Replay.Buckets] ) ] ) diff --git a/lib/realtime/monitoring/prom_ex/plugins/tenants.ex b/lib/realtime/monitoring/prom_ex/plugins/tenants.ex index 0035e9594..f145af830 100644 --- 
a/lib/realtime/monitoring/prom_ex/plugins/tenants.ex +++ b/lib/realtime/monitoring/prom_ex/plugins/tenants.ex @@ -8,19 +8,24 @@ defmodule Realtime.PromEx.Plugins.Tenants do require Logger + defmodule Buckets do + @moduledoc false + use Peep.Buckets.Custom, buckets: [10, 250, 5000, 15_000] + end + @event_connected [:prom_ex, :plugin, :realtime, :tenants, :connected] @impl true def event_metrics(_) do Event.build(:realtime, [ distribution( - [:realtime, :rpc], + [:realtime, :global, :rpc], event_name: [:realtime, :rpc], - description: "Latency of rpc calls triggered by a tenant action", + description: "Global Latency of rpc calls", measurement: :latency, unit: {:microsecond, :millisecond}, - tags: [:success, :tenant, :mechanism], - reporter_options: [buckets: [10, 250, 5000, 15_000]] + tags: [:success, :mechanism], + reporter_options: [peep_bucket_calculator: Buckets] ) ]) end @@ -41,7 +46,8 @@ defmodule Realtime.PromEx.Plugins.Tenants do description: "The total count of connected tenants.", measurement: :connected ) - ] + ], + detach_on_error: false ) ] end diff --git a/lib/realtime/monitoring/prometheus.ex b/lib/realtime/monitoring/prometheus.ex new file mode 100644 index 000000000..ef100f1bc --- /dev/null +++ b/lib/realtime/monitoring/prometheus.ex @@ -0,0 +1,193 @@ +# Based on https://github.com/rkallos/peep/blob/708546ed069aebdf78ac1f581130332bd2e8b5b1/lib/peep/prometheus.ex +defmodule Realtime.Monitoring.Prometheus do + @moduledoc """ + Prometheus exporter module + + Use a temporary ets table to cache formatted names and label values + """ + + alias Telemetry.Metrics.{Counter, Distribution, LastValue, Sum} + + def export(metrics) do + cache = :ets.new(:cache, [:set, :private, read_concurrency: false, write_concurrency: :auto]) + + result = [Enum.map(metrics, &format(&1, cache)), "# EOF\n"] + :ets.delete(cache) + result + end + + defp format({%Counter{}, _series} = metric, cache) do + format_standard(metric, "counter", cache) + end + + defp format({%Sum{} = 
spec, _series} = metric, cache) do + format_standard(metric, spec.reporter_options[:prometheus_type] || "counter", cache) + end + + defp format({%LastValue{} = spec, _series} = metric, cache) do + format_standard(metric, spec.reporter_options[:prometheus_type] || "gauge", cache) + end + + defp format({%Distribution{} = metric, tagged_series}, cache) do + name = format_name(metric.name, cache) + help = ["# HELP ", name, " ", escape_help(metric.description)] + type = ["# TYPE ", name, " histogram"] + + distributions = + Enum.map(tagged_series, fn {tags, buckets} -> + format_distribution(name, tags, buckets, cache) + end) + + [help, ?\n, type, ?\n, distributions] + end + + defp format_distribution(name, tags, buckets, cache) do + has_labels? = not Enum.empty?(tags) + + buckets_as_floats = + Map.drop(buckets, [:sum, :infinity]) + |> Enum.map(fn {bucket_string, count} -> {String.to_float(bucket_string), count} end) + |> Enum.sort() + + {prefix_sums, count} = prefix_sums(buckets_as_floats) + + {labels_done, bucket_partial} = + if has_labels? do + labels = format_labels(tags, cache) + {[?{, labels, "} "], [name, "_bucket{", labels, ",le=\""]} + else + {?\s, [name, "_bucket{le=\""]} + end + + samples = + prefix_sums + |> Enum.map(fn {upper_bound, count} -> + [bucket_partial, format_value(upper_bound), "\"} ", Integer.to_string(count), ?\n] + end) + + sum = Map.get(buckets, :sum, 0) + inf = Map.get(buckets, :infinity, 0) + + [ + samples, + [bucket_partial, "+Inf\"} ", Integer.to_string(count + inf), ?\n], + [name, "_sum", labels_done, Integer.to_string(sum), ?\n], + [name, "_count", labels_done, Integer.to_string(count + inf), ?\n] + ] + end + + defp format_standard({metric, series}, type, cache) do + name = format_name(metric.name, cache) + help = ["# HELP ", name, " ", escape_help(metric.description)] + type = ["# TYPE ", name, " ", to_string(type)] + + samples = + Enum.map(series, fn {labels, value} -> + has_labels? = not Enum.empty?(labels) + + if has_labels? 
do + [name, ?{, format_labels(labels, cache), ?}, " ", format_value(value), ?\n] + else + [name, " ", format_value(value), ?\n] + end + end) + + [help, ?\n, type, ?\n, samples] + end + + defp format_labels(labels, cache) do + labels + |> Enum.sort() + |> Enum.map_intersperse(?,, fn {k, v} -> [to_string(k), "=\"", escape(v, cache), ?"] end) + end + + defp format_name(name, cache) do + case :ets.lookup_element(cache, name, 2, nil) do + nil -> + result = + name + |> Enum.join("_") + |> format_name_start() + |> IO.iodata_to_binary() + + :ets.insert(cache, {name, result}) + result + + result -> + result + end + end + + # Name must start with an ascii letter + defp format_name_start(<>) when h not in ?A..?Z and h not in ?a..?z, + do: format_name_start(rest) + + defp format_name_start(<>), + do: format_name_rest(rest, <<>>) + + # Otherwise only letters, numbers, or _ + defp format_name_rest(<>, acc) + when h in ?A..?Z or h in ?a..?z or h in ?0..?9 or h == ?_, + do: format_name_rest(rest, [acc, h]) + + defp format_name_rest(<<_, rest::binary>>, acc), do: format_name_rest(rest, acc) + defp format_name_rest(<<>>, acc), do: acc + + defp format_value(true), do: "1" + defp format_value(false), do: "0" + defp format_value(nil), do: "0" + defp format_value(n) when is_integer(n), do: Integer.to_string(n) + defp format_value(f) when is_float(f), do: Float.to_string(f) + + defp escape(nil, _cache), do: "nil" + + defp escape(value, cache) do + case :ets.lookup_element(cache, value, 2, nil) do + nil -> + result = + value + |> safe_to_string() + |> do_escape(<<>>) + |> IO.iodata_to_binary() + + :ets.insert(cache, {value, result}) + result + + result -> + result + end + end + + defp safe_to_string(value) do + case String.Chars.impl_for(value) do + nil -> inspect(value) + _ -> to_string(value) + end + end + + defp do_escape(<>, acc), do: do_escape(rest, [acc, ?\\, ?\"]) + defp do_escape(<>, acc), do: do_escape(rest, [acc, ?\\, ?\\]) + defp do_escape(<>, acc), do: do_escape(rest, [acc, 
?\\, ?n]) + defp do_escape(<>, acc), do: do_escape(rest, [acc, h]) + defp do_escape(<<>>, acc), do: acc + + defp escape_help(value) do + value + |> to_string() + |> escape_help(<<>>) + end + + defp escape_help(<>, acc), do: escape_help(rest, <>) + defp escape_help(<>, acc), do: escape_help(rest, <>) + defp escape_help(<>, acc), do: escape_help(rest, <>) + defp escape_help(<<>>, acc), do: acc + + defp prefix_sums(buckets), do: prefix_sums(buckets, [], 0) + defp prefix_sums([], acc, sum), do: {Enum.reverse(acc), sum} + + defp prefix_sums([{bucket, count} | rest], acc, sum) do + new_sum = sum + count + new_bucket = {bucket, new_sum} + prefix_sums(rest, [new_bucket | acc], new_sum) + end +end diff --git a/lib/realtime/nodes.ex b/lib/realtime/nodes.ex index ae237eb5f..e57b9c7e0 100644 --- a/lib/realtime/nodes.ex +++ b/lib/realtime/nodes.ex @@ -64,6 +64,27 @@ defmodule Realtime.Nodes do def region_nodes(nil), do: [] + @doc """ + Picks a node from a region based on the provided key + """ + @spec node_from_region(String.t(), term()) :: {:ok, node} | {:error, :not_available} + def node_from_region(region, key) when is_binary(region) do + nodes = region_nodes(region) + + case nodes do + [] -> + {:error, :not_available} + + _ -> + member_count = Enum.count(nodes) + index = :erlang.phash2(key, member_count) + + {:ok, Enum.fetch!(nodes, index)} + end + end + + def node_from_region(_, _), do: {:error, :not_available} + @doc """ Picks the node to launch the Postgres connection on. 
@@ -73,11 +94,6 @@ defmodule Realtime.Nodes do @spec launch_node(String.t(), String.t() | nil, atom()) :: atom() def launch_node(tenant_id, region, default) do case region_nodes(region) do - [node] -> - Logger.warning("Only one region node (#{inspect(node)}) for #{region} using default #{inspect(default)}") - - default - [] -> Logger.warning("Zero region nodes for #{region} using #{inspect(default)}") default @@ -105,7 +121,7 @@ defmodule Realtime.Nodes do iex> node = :"pink@127.0.0.1" iex> Realtime.Helpers.short_node_id_from_name(node) - "127.0.0.1" + "pink@127.0.0.1" iex> node = :"pink@10.0.1.1" iex> Realtime.Helpers.short_node_id_from_name(node) @@ -124,64 +140,15 @@ defmodule Realtime.Nodes do [_, _, _, _, _, one, two, _] -> one <> two + ["127.0.0.1"] -> + Atom.to_string(name) + _other -> host end end - @mapping_realtime_region_to_tenant_region_aws %{ - "ap-southeast-1" => [ - "ap-east-1", - "ap-northeast-1", - "ap-northeast-2", - "ap-south-1", - "ap-southeast-1" - ], - "ap-southeast-2" => ["ap-southeast-2"], - "eu-west-2" => [ - "eu-central-1", - "eu-central-2", - "eu-north-1", - "eu-west-1", - "eu-west-2", - "eu-west-3" - ], - "us-east-1" => [ - "ca-central-1", - "sa-east-1", - "us-east-1", - "us-east-2" - ], - "us-west-1" => ["us-west-1", "us-west-2"] - } - @mapping_realtime_region_to_tenant_region_fly %{ - "iad" => ["ca-central-1", "sa-east-1", "us-east-1"], - "lhr" => ["eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3"], - "sea" => ["us-west-1"], - "syd" => [ - "ap-east-1", - "ap-northeast-1", - "ap-northeast-2", - "ap-south-1", - "ap-southeast-1", - "ap-southeast-2" - ] - } - - @doc """ - Fetches the tenant regions for a given realtime reagion - """ - @spec region_to_tenant_regions(String.t()) :: list() | nil - def region_to_tenant_regions(region) do - platform = Application.get_env(:realtime, :platform) - - mappings = - case platform do - :aws -> @mapping_realtime_region_to_tenant_region_aws - :fly -> @mapping_realtime_region_to_tenant_region_fly - _ 
-> %{} - end - - Map.get(mappings, region) - end + @spec all_node_regions() :: [String.t()] + @doc "List all the regions where nodes can be launched" + def all_node_regions(), do: :syn.group_names(RegionNodes) end diff --git a/lib/realtime/operations.ex b/lib/realtime/operations.ex index 76efa38fb..e17bf249e 100644 --- a/lib/realtime/operations.ex +++ b/lib/realtime/operations.ex @@ -9,7 +9,9 @@ defmodule Realtime.Operations do """ def rebalance do Enum.reduce(:syn.group_names(:users), 0, fn tenant, acc -> - case :syn.lookup(Extensions.PostgresCdcRls, tenant) do + scope = Realtime.Syn.PostgresCdc.scope(tenant) + + case :syn.lookup(scope, tenant) do {pid, %{region: region}} -> platform_region = Realtime.Nodes.platform_region_translator(region) current_node = node(pid) diff --git a/lib/realtime/postgres_cdc.ex b/lib/realtime/postgres_cdc.ex index eef81a1ec..9a4350983 100644 --- a/lib/realtime/postgres_cdc.ex +++ b/lib/realtime/postgres_cdc.ex @@ -16,8 +16,8 @@ defmodule Realtime.PostgresCdc do apply(module, :handle_connect, [opts]) end - def after_connect(module, connect_response, extension, params) do - apply(module, :handle_after_connect, [connect_response, extension, params]) + def after_connect(module, connect_response, extension, params, tenant) do + apply(module, :handle_after_connect, [connect_response, extension, params, tenant]) end def subscribe(module, pg_change_params, tenant, metadata) do @@ -80,7 +80,8 @@ defmodule Realtime.PostgresCdc do end @callback handle_connect(any()) :: {:ok, any()} | nil - @callback handle_after_connect(any(), any(), any()) :: {:ok, any()} | {:error, any()} + @callback handle_after_connect(any(), any(), any(), tenant_id :: String.t()) :: + {:ok, any()} | {:error, any()} | {:error, any(), any()} @callback handle_subscribe(any(), any(), any()) :: :ok @callback handle_stop(any(), any()) :: any() end diff --git a/lib/realtime/rate_counter/rate_counter.ex b/lib/realtime/rate_counter/rate_counter.ex index d489b86f2..f05b52330 100644 
--- a/lib/realtime/rate_counter/rate_counter.ex +++ b/lib/realtime/rate_counter/rate_counter.ex @@ -20,7 +20,7 @@ defmodule Realtime.RateCounter do defstruct id: nil, opts: [] end - @idle_shutdown :timer.minutes(15) + @idle_shutdown :timer.minutes(10) @tick :timer.seconds(1) @max_bucket_len 60 @cache __MODULE__ diff --git a/lib/realtime/repo.ex b/lib/realtime/repo.ex index f3850712a..5375c2c97 100644 --- a/lib/realtime/repo.ex +++ b/lib/realtime/repo.ex @@ -1,12 +1,8 @@ defmodule Realtime.Repo do - use Realtime.Logs - use Ecto.Repo, otp_app: :realtime, adapter: Ecto.Adapters.Postgres - import Ecto.Query - def with_dynamic_repo(config, callback) do default_dynamic_repo = get_dynamic_repo() {:ok, repo} = [name: nil, pool_size: 2] |> Keyword.merge(config) |> Realtime.Repo.start_link() @@ -19,244 +15,4 @@ defmodule Realtime.Repo do Supervisor.stop(repo) end end - - @doc """ - Lists all records for a given query and converts them into a given struct - """ - @spec all(DBConnection.conn(), Ecto.Queryable.t(), module(), [Postgrex.execute_option()]) :: - {:ok, list(struct())} | {:error, any()} - def all(conn, query, result_struct, opts \\ []) do - conn - |> run_all_query(query, opts) - |> result_to_structs(result_struct) - end - - @doc """ - Fetches one record for a given query and converts it into a given struct - """ - @spec one( - DBConnection.conn(), - Ecto.Query.t(), - module(), - Postgrex.option() | Keyword.t() - ) :: - {:error, any()} | {:ok, struct()} | Ecto.Changeset.t() - def one(conn, query, result_struct, opts \\ []) do - conn - |> run_all_query(query, opts) - |> result_to_single_struct(result_struct, nil) - end - - @doc """ - Inserts a given changeset into the database and converts the result into a given struct - """ - @spec insert( - DBConnection.conn(), - Ecto.Changeset.t(), - module(), - Postgrex.option() | Keyword.t() - ) :: - {:ok, struct()} | {:error, any()} | Ecto.Changeset.t() - def insert(conn, changeset, result_struct, opts \\ []) do - with {:ok, 
{query, args}} <- insert_query_from_changeset(changeset) do - conn - |> run_query_with_trap(query, args, opts) - |> result_to_single_struct(result_struct, changeset) - end - end - - @doc """ - Inserts all changesets into the database and converts the result into a given list of structs - """ - @spec insert_all_entries( - DBConnection.conn(), - [Ecto.Changeset.t()], - module(), - Postgrex.option() | Keyword.t() - ) :: - {:ok, [struct()]} | {:error, any()} | Ecto.Changeset.t() - def insert_all_entries(conn, changesets, result_struct, opts \\ []) do - with {:ok, {query, args}} <- insert_all_query_from_changeset(changesets) do - conn - |> run_query_with_trap(query, args, opts) - |> result_to_structs(result_struct) - end - end - - @doc """ - Deletes records for a given query and returns the number of deleted records - """ - @spec del(DBConnection.conn(), Ecto.Queryable.t()) :: - {:ok, non_neg_integer()} | {:error, any()} - def del(conn, query) do - with {:ok, %Postgrex.Result{num_rows: num_rows}} <- run_delete_query(conn, query) do - {:ok, num_rows} - end - end - - @doc """ - Updates an entry based on the changeset and returns the updated entry - """ - @spec update(DBConnection.conn(), Ecto.Changeset.t(), module()) :: - {:ok, struct()} | {:error, any()} | Ecto.Changeset.t() - def update(conn, changeset, result_struct, opts \\ []) do - with {:ok, {query, args}} <- update_query_from_changeset(changeset) do - conn - |> run_query_with_trap(query, args, opts) - |> result_to_single_struct(result_struct, changeset) - end - end - - defp result_to_single_struct( - {:error, %Postgrex.Error{postgres: %{code: :unique_violation, constraint: "channels_name_index"}}}, - _struct, - changeset - ) do - Ecto.Changeset.add_error(changeset, :name, "has already been taken") - end - - defp result_to_single_struct({:error, _} = error, _, _), do: error - - defp result_to_single_struct({:ok, %Postgrex.Result{rows: []}}, _, _) do - {:error, :not_found} - end - - defp result_to_single_struct({:ok, 
%Postgrex.Result{rows: [row], columns: columns}}, struct, _) do - {:ok, load(struct, Enum.zip(columns, row))} - end - - defp result_to_single_struct({:ok, %Postgrex.Result{num_rows: num_rows}}, _, _) do - raise("expected at most one result but got #{num_rows} in result") - end - - defp result_to_structs({:error, _} = error, _), do: error - - defp result_to_structs({:ok, %Postgrex.Result{rows: rows, columns: columns}}, struct) do - {:ok, Enum.map(rows, &load(struct, Enum.zip(columns, &1)))} - end - - defp insert_query_from_changeset(%{valid?: false} = changeset), do: {:error, changeset} - - defp insert_query_from_changeset(changeset) do - schema = changeset.data.__struct__ - source = schema.__schema__(:source) - prefix = schema.__schema__(:prefix) - acc = %{header: [], rows: []} - - %{header: header, rows: rows} = - Enum.reduce(changeset.changes, acc, fn {field, row}, %{header: header, rows: rows} -> - row = - case row do - row when is_boolean(row) -> row - row when is_atom(row) -> Atom.to_string(row) - _ -> row - end - - %{ - header: [Atom.to_string(field) | header], - rows: [row | rows] - } - end) - - table = "\"#{prefix}\".\"#{source}\"" - header = "(#{Enum.map_join(header, ",", &"\"#{&1}\"")})" - - arg_index = - rows - |> Enum.with_index(1) - |> Enum.map_join(",", fn {_, index} -> "$#{index}" end) - - {:ok, {"INSERT INTO #{table} #{header} VALUES (#{arg_index}) RETURNING *", rows}} - end - - defp insert_all_query_from_changeset(changesets) do - invalid = Enum.filter(changesets, &(!&1.valid?)) - - if invalid != [] do - {:error, changesets} - else - [schema] = changesets |> Enum.map(& &1.data.__struct__) |> Enum.uniq() - - source = schema.__schema__(:source) - prefix = schema.__schema__(:prefix) - changes = Enum.map(changesets, & &1.changes) - - %{header: header, rows: rows} = - Enum.reduce(changes, %{header: [], rows: []}, fn v, changes_acc -> - Enum.reduce(v, changes_acc, fn {field, row}, %{header: header, rows: rows} -> - row = - case row do - row when 
is_boolean(row) -> row - row when is_atom(row) -> Atom.to_string(row) - _ -> row - end - - %{ - header: Enum.uniq([Atom.to_string(field) | header]), - rows: [row | rows] - } - end) - end) - - args_index = - rows - |> Enum.chunk_every(length(header)) - |> Enum.reduce({"", 1}, fn row, {acc, count} -> - arg_index = - row - |> Enum.with_index(count) - |> Enum.map_join("", fn {_, index} -> "$#{index}," end) - |> String.trim_trailing(",") - |> then(&"(#{&1})") - - {"#{acc},#{arg_index}", count + length(row)} - end) - |> elem(0) - |> String.trim_leading(",") - - table = "\"#{prefix}\".\"#{source}\"" - header = "(#{Enum.map_join(header, ",", &"\"#{&1}\"")})" - {:ok, {"INSERT INTO #{table} #{header} VALUES #{args_index} RETURNING *", rows}} - end - end - - defp update_query_from_changeset(%{valid?: false} = changeset), do: {:error, changeset} - - defp update_query_from_changeset(changeset) do - %Ecto.Changeset{data: %{id: id, __struct__: struct}, changes: changes} = changeset - changes = Keyword.new(changes) - query = from(c in struct, where: c.id == ^id, select: c, update: [set: ^changes]) - {:ok, to_sql(:update_all, query)} - end - - defp run_all_query(conn, query, opts) do - {query, args} = to_sql(:all, query) - run_query_with_trap(conn, query, args, opts) - end - - defp run_delete_query(conn, query) do - {query, args} = to_sql(:delete_all, query) - run_query_with_trap(conn, query, args) - end - - defp run_query_with_trap(conn, query, args, opts \\ []) do - Postgrex.query(conn, query, args, opts) - rescue - e -> - log_error("ErrorRunningQuery", e) - {:error, :postgrex_exception} - catch - :exit, {:noproc, {DBConnection.Holder, :checkout, _}} -> - log_error( - "UnableCheckoutConnection", - "Unable to checkout connection, please check your connection pool configuration" - ) - - {:error, :postgrex_exception} - - :exit, reason -> - log_error("UnknownError", reason) - - {:error, :postgrex_exception} - end end diff --git a/lib/realtime/repo_replica.ex 
b/lib/realtime/repo_replica.ex index 8079ccb8e..2a957c439 100644 --- a/lib/realtime/repo_replica.ex +++ b/lib/realtime/repo_replica.ex @@ -45,17 +45,19 @@ defmodule Realtime.Repo.Replica do end region = Application.get_env(:realtime, :region) + master_region = Application.get_env(:realtime, :master_region) || region replica = Map.get(replicas, region) replica_conf = Application.get_env(:realtime, replica) # Do not create module if replica isn't set or configuration is not present cond do is_nil(replica) -> - Logger.info("Replica region not found, defaulting to Realtime.Repo") Realtime.Repo is_nil(replica_conf) -> - Logger.info("Replica config not found for #{region} region") + Realtime.Repo + + region == master_region -> Realtime.Repo true -> diff --git a/lib/realtime/rpc.ex b/lib/realtime/rpc.ex index c63b29f08..7e4095b95 100644 --- a/lib/realtime/rpc.ex +++ b/lib/realtime/rpc.ex @@ -10,14 +10,13 @@ defmodule Realtime.Rpc do """ @spec call(atom(), atom(), atom(), any(), keyword()) :: any() def call(node, mod, func, args, opts \\ []) do - tenant_id = Keyword.get(opts, :tenant_id) timeout = Keyword.get(opts, :timeout, Application.get_env(:realtime, :rpc_timeout)) {latency, response} = :timer.tc(fn -> :rpc.call(node, mod, func, args, timeout) end) Telemetry.execute( [:realtime, :rpc], %{latency: latency}, - %{mod: mod, func: func, target_node: node, origin_node: node(), mechanism: :rpc, tenant: tenant_id, success: nil} + %{mod: mod, func: func, target_node: node, origin_node: node(), mechanism: :rpc, success: nil} ) response @@ -45,7 +44,6 @@ defmodule Realtime.Rpc do target_node: node, origin_node: node(), success: true, - tenant: tenant_id, mechanism: :erpc } ) @@ -62,7 +60,6 @@ defmodule Realtime.Rpc do target_node: node, origin_node: node(), success: false, - tenant: tenant_id, mechanism: :erpc } ) @@ -87,7 +84,6 @@ defmodule Realtime.Rpc do target_node: node, origin_node: node(), success: false, - tenant: tenant_id, mechanism: :erpc } ) diff --git 
a/lib/realtime/syn/postgres_cdc.ex b/lib/realtime/syn/postgres_cdc.ex new file mode 100644 index 000000000..3b4dd6541 --- /dev/null +++ b/lib/realtime/syn/postgres_cdc.ex @@ -0,0 +1,23 @@ +defmodule Realtime.Syn.PostgresCdc do + @moduledoc """ + Scope for the PostgresCdc module. + """ + + @doc """ + Returns the scope for a given tenant id. + """ + @spec scope(String.t()) :: atom() + def scope(tenant_id) do + shards = Application.fetch_env!(:realtime, :postgres_cdc_scope_shards) + shard = :erlang.phash2(tenant_id, shards) + :"realtime_postgres_cdc_#{shard}" + end + + def scopes() do + shards = Application.fetch_env!(:realtime, :postgres_cdc_scope_shards) + Enum.map(0..(shards - 1), fn shard -> :"realtime_postgres_cdc_#{shard}" end) + end + + def syn_topic_prefix(), do: "realtime_postgres_cdc_" + def syn_topic(tenant_id), do: "#{syn_topic_prefix()}#{tenant_id}" +end diff --git a/lib/realtime/syn_handler.ex b/lib/realtime/syn_handler.ex index 397c8cf8f..66e92ab88 100644 --- a/lib/realtime/syn_handler.ex +++ b/lib/realtime/syn_handler.ex @@ -3,24 +3,31 @@ defmodule Realtime.SynHandler do Custom defined Syn's callbacks """ require Logger - alias Extensions.PostgresCdcRls - alias RealtimeWeb.Endpoint + alias Realtime.Syn.PostgresCdc alias Realtime.Tenants.Connect + alias RealtimeWeb.Endpoint @behaviour :syn_event_handler + @postgres_cdc_scope_prefix PostgresCdc.syn_topic_prefix() + @impl true - def on_registry_process_updated(Connect, tenant_id, _pid, %{conn: conn}, :normal) when is_pid(conn) do + def on_registry_process_updated(Connect, tenant_id, pid, %{conn: conn}, :normal) when is_pid(conn) do # Update that a database connection is ready - Endpoint.local_broadcast(Connect.syn_topic(tenant_id), "ready", %{conn: conn}) + Endpoint.local_broadcast(Connect.syn_topic(tenant_id), "ready", %{pid: pid, conn: conn}) end - def on_registry_process_updated(PostgresCdcRls, tenant_id, _pid, meta, _reason) do - # Update that the CdCRls connection is ready - 
Endpoint.local_broadcast(PostgresCdcRls.syn_topic(tenant_id), "ready", meta) - end + def on_registry_process_updated(scope, tenant_id, _pid, meta, _reason) do + scope = Atom.to_string(scope) - def on_registry_process_updated(_scope, _name, _pid, _meta, _reason), do: :ok + case scope do + @postgres_cdc_scope_prefix <> _ -> + Endpoint.local_broadcast(PostgresCdc.syn_topic(tenant_id), "ready", meta) + + _ -> + :ok + end + end @doc """ When processes registered with :syn are unregistered, either manually or by stopping, this @@ -32,13 +39,18 @@ defmodule Realtime.SynHandler do was started, and subsequently stopped because :syn handled the conflict. """ @impl true - def on_process_unregistered(mod, name, pid, _meta, reason) do - if reason == :syn_conflict_resolution do - log("#{mod} terminated due to syn conflict resolution: #{inspect(name)} #{inspect(pid)}") + def on_process_unregistered(scope, name, pid, _meta, reason) do + case Atom.to_string(scope) do + @postgres_cdc_scope_prefix <> _ = scope -> + Endpoint.local_broadcast(PostgresCdc.syn_topic(name), scope <> "_down", %{pid: pid, reason: reason}) + + _ -> + topic = topic(scope) + Endpoint.local_broadcast(topic <> ":" <> name, topic <> "_down", %{pid: pid, reason: reason}) end - topic = topic(mod) - Endpoint.local_broadcast(topic <> ":" <> name, topic <> "_down", nil) + if reason == :syn_conflict_resolution, + do: log("#{scope} terminated due to syn conflict resolution: #{inspect(name)} #{inspect(pid)}") :ok end @@ -53,19 +65,19 @@ defmodule Realtime.SynHandler do If it times out an exit with reason :kill that can't be trapped """ @impl true - def resolve_registry_conflict(mod, name, {pid1, _meta1, time1}, {pid2, _meta2, time2}) do - {pid_to_keep, pid_to_stop} = decide(pid1, time1, pid2, time2) + def resolve_registry_conflict(mod, name, {pid1, _meta1, _time1}, {pid2, _meta2, _time2}) do + {pid_to_keep, pid_to_stop} = decide(pid1, pid2, name) # Is this function running on the node that should stop? 
if node(pid_to_stop) == node() do log( - "Resolving conflict on scope #{inspect(mod)} for name #{inspect(name)} {#{inspect(pid1)}, #{time1}} vs {#{inspect(pid2)}, #{time2}}, stop local process: #{inspect(pid_to_stop)}" + "Resolving conflict on scope #{inspect(mod)} for name #{inspect(name)} {#{node(pid1)}, #{inspect(pid1)}} vs {#{node(pid2)}, #{inspect(pid2)}}, stop local process: #{inspect(pid_to_stop)}" ) stop(pid_to_stop) else log( - "Resolving conflict on scope #{inspect(mod)} for name #{inspect(name)} {#{inspect(pid1)}, #{time1}} vs {#{inspect(pid2)}, #{time2}}, remote process will be stopped: #{inspect(pid_to_stop)}" + "Resolving conflict on scope #{inspect(mod)} for name #{inspect(name)} {#{node(pid1)}, #{inspect(pid1)}} vs {#{node(pid2)}, #{inspect(pid2)}}, remote process will be stopped: #{inspect(pid_to_stop)}" ) end @@ -90,23 +102,26 @@ defmodule Realtime.SynHandler do defp log(message), do: Logger.warning("SynHandler(#{node()}): #{message}") - # If the time on both pids are exactly the same - # we compare the node names and pick one consistently - # Node names are necessarily unique - defp decide(pid1, time1, pid2, time2) when time1 == time2 do - if node(pid1) < node(pid2) do - {pid1, pid2} - else - {pid2, pid1} - end - end - - defp decide(pid1, time1, pid2, time2) do - # We pick the one that started first. 
- if time1 < time2 do - {pid1, pid2} + # We use node and the name to decide who lives and who dies + # This way both nodes will always agree on the same outcome + # regardless of timing issues + defp decide(pid1, pid2, name) do + # We hash the name to not always pick one specific node when a conflict happens + # between these 2 nodes + hash = :erlang.phash2(name, 2) + + if hash == 1 do + if node(pid1) < node(pid2) do + {pid1, pid2} + else + {pid2, pid1} + end else - {pid2, pid1} + if node(pid1) < node(pid2) do + {pid2, pid1} + else + {pid1, pid2} + end end end diff --git a/lib/realtime/telemetry/logger.ex b/lib/realtime/telemetry/logger.ex index cbc0c6cc4..5a290f01c 100644 --- a/lib/realtime/telemetry/logger.ex +++ b/lib/realtime/telemetry/logger.ex @@ -28,7 +28,6 @@ defmodule Realtime.Telemetry.Logger do @doc """ Logs billing metrics for a tenant aggregated and emitted by a PromEx metric poller. """ - def handle_event(event, measurements, %{tenant: tenant}, _config) do meta = %{project: tenant, measurements: measurements} Logger.info(["Billing metrics: ", inspect(event)], meta) diff --git a/lib/realtime/tenants.ex b/lib/realtime/tenants.ex index 63965abea..2cf2dec34 100644 --- a/lib/realtime/tenants.ex +++ b/lib/realtime/tenants.ex @@ -9,7 +9,6 @@ defmodule Realtime.Tenants do alias Realtime.Api.Tenant alias Realtime.Database alias Realtime.RateCounter - alias Realtime.Repo alias Realtime.Repo.Replica alias Realtime.Tenants.Cache alias Realtime.Tenants.Connect @@ -21,7 +20,8 @@ defmodule Realtime.Tenants do """ @spec list_connected_tenants(atom()) :: [String.t()] def list_connected_tenants(node) do - :syn.group_names(:users, node) + UsersCounter.scopes() + |> Enum.flat_map(fn scope -> :syn.group_names(scope, node) end) end @doc """ @@ -98,13 +98,11 @@ defmodule Realtime.Tenants do connected_cluster when is_integer(connected_cluster) -> tenant = Cache.get_tenant_by_external_id(external_id) - {:ok, db_conn} = Database.connect(tenant, "realtime_health_check") - 
Process.alive?(db_conn) && GenServer.stop(db_conn) - Migrations.run_migrations(tenant) + result? = Migrations.run_migrations(tenant) {:ok, %{ - healthy: true, + healthy: result? == :ok || result? == :noop, db_connected: false, connected_cluster: connected_cluster, region: region, @@ -232,16 +230,30 @@ defmodule Realtime.Tenants do end @doc "RateCounter arguments for counting database events per second." - @spec db_events_per_second_rate(Tenant.t() | String.t()) :: RateCounter.Args.t() - def db_events_per_second_rate(%Tenant{} = tenant), do: db_events_per_second_rate(tenant.external_id) + @spec db_events_per_second_rate(Tenant.t()) :: RateCounter.Args.t() + def db_events_per_second_rate(%Tenant{} = tenant), + do: db_events_per_second_rate(tenant.external_id, tenant.max_events_per_second) - def db_events_per_second_rate(tenant_id) when is_binary(tenant_id) do + @doc "RateCounter arguments for counting database events per second with a limit." + @spec db_events_per_second_rate(String.t(), non_neg_integer) :: RateCounter.Args.t() + def db_events_per_second_rate(tenant_id, max_events_per_second) when is_binary(tenant_id) do opts = [ telemetry: %{ event_name: [:channel, :db_events], measurements: %{}, metadata: %{tenant: tenant_id} - } + }, + limit: [ + value: max_events_per_second, + measurement: :avg, + log: true, + log_fn: fn -> + Logger.error("MessagePerSecondRateLimitReached: Too many postgres changes messages per second", + external_id: tenant_id, + project: tenant_id + ) + end + ] ] %RateCounter.Args{id: db_events_per_second_key(tenant_id), opts: opts} @@ -314,7 +326,7 @@ defmodule Realtime.Tenants do opts = [ max_bucket_len: 30, limit: [ - value: pool_size(tenant), + value: authorization_pool_size(tenant), measurement: :sum, log_fn: fn -> Logger.critical("IncreaseConnectionPool: Too many database timeouts", @@ -328,11 +340,56 @@ defmodule Realtime.Tenants do %RateCounter.Args{id: {:channel, :authorization_errors, external_id}, opts: opts} end - defp 
pool_size(%{extensions: [%{settings: settings} | _]}) do + @spec subscription_errors_per_second_rate(String.t(), non_neg_integer) :: RateCounter.Args.t() + def subscription_errors_per_second_rate(tenant_id, pool_size) do + opts = [ + max_bucket_len: 30, + limit: [ + value: pool_size, + measurement: :sum, + log_fn: fn -> + Logger.error("IncreaseSubscriptionConnectionPool: Too many database timeouts", + external_id: tenant_id, + project: tenant_id + ) + end + ] + ] + + %RateCounter.Args{id: {:channel, :subscription_errors, tenant_id}, opts: opts} + end + + @connect_errors_per_second_default 10 + @doc "RateCounter arguments for counting connect per second." + @spec connect_errors_per_second_rate(Tenant.t() | String.t()) :: RateCounter.Args.t() + def connect_errors_per_second_rate(%Tenant{external_id: external_id}) do + connect_errors_per_second_rate(external_id) + end + + def connect_errors_per_second_rate(tenant_id) do + opts = [ + max_bucket_len: 30, + limit: [ + value: @connect_errors_per_second_default, + measurement: :sum, + log_fn: fn -> + Logger.critical( + "DatabaseConnectionRateLimitReached: Too many connection attempts against the tenant database", + external_id: tenant_id, + project: tenant_id + ) + end + ] + ] + + %RateCounter.Args{id: {:database, :connect, tenant_id}, opts: opts} + end + + defp authorization_pool_size(%{extensions: [%{settings: settings} | _]}) do Database.pool_size_by_application_name("realtime_connect", settings) end - defp pool_size(_), do: 1 + defp authorization_pool_size(_), do: 1 @spec get_tenant_limits(Realtime.Api.Tenant.t(), maybe_improper_list) :: list def get_tenant_limits(%Tenant{} = tenant, keys) when is_list(keys) do @@ -399,8 +456,7 @@ defmodule Realtime.Tenants do @spec suspend_tenant_by_external_id(String.t()) :: {:ok, Tenant.t()} | {:error, term()} def suspend_tenant_by_external_id(external_id) do external_id - |> Cache.get_tenant_by_external_id() - |> Api.update_tenant(%{suspend: true}) + |> 
Api.update_tenant_by_external_id(%{suspend: true}) |> tap(fn _ -> broadcast_operation_event(:suspend_tenant, external_id) end) end @@ -410,30 +466,18 @@ defmodule Realtime.Tenants do @spec unsuspend_tenant_by_external_id(String.t()) :: {:ok, Tenant.t()} | {:error, term()} def unsuspend_tenant_by_external_id(external_id) do external_id - |> Cache.get_tenant_by_external_id() - |> Api.update_tenant(%{suspend: false}) + |> Api.update_tenant_by_external_id(%{suspend: false}) |> tap(fn _ -> broadcast_operation_event(:unsuspend_tenant, external_id) end) end @doc """ Checks if migrations for a given tenant need to run. """ - @spec run_migrations?(Tenant.t()) :: boolean() - def run_migrations?(%Tenant{} = tenant) do - tenant.migrations_ran < Enum.count(Migrations.migrations()) - end + @spec run_migrations?(Tenant.t() | integer()) :: boolean() + def run_migrations?(%Tenant{} = tenant), do: run_migrations?(tenant.migrations_ran) - @doc """ - Updates the migrations_ran field for a tenant. - """ - @spec update_migrations_ran(binary(), integer()) :: {:ok, Tenant.t()} | {:error, term()} - def update_migrations_ran(external_id, count) do - external_id - |> Cache.get_tenant_by_external_id() - |> Tenant.changeset(%{migrations_ran: count}) - |> Repo.update!() - |> tap(fn _ -> Cache.distributed_invalidate_tenant_cache(external_id) end) - end + def run_migrations?(migrations_ran) when is_integer(migrations_ran), + do: migrations_ran < Enum.count(Migrations.migrations()) @doc """ Broadcasts an operation event to the tenant's operations channel. 
@@ -449,4 +493,20 @@ defmodule Realtime.Tenants do @spec region(Tenant.t()) :: String.t() | nil def region(%Tenant{extensions: [%{settings: settings}]}), do: Map.get(settings, "region") def region(_), do: nil + + @doc """ + """ + @spec validate_payload_size(Tenant.t() | binary(), map()) :: :ok | {:error, :payload_size_exceeded} + def validate_payload_size(tenant_id, payload) when is_binary(tenant_id) do + tenant_id + |> Cache.get_tenant_by_external_id() + |> validate_payload_size(payload) + end + + @payload_size_padding 500 + def validate_payload_size(%Tenant{max_payload_size_in_kb: max_payload_size_in_kb}, payload) do + max_payload_size = max_payload_size_in_kb * 1000 + @payload_size_padding + payload_size = :erlang.external_size(payload) + if payload_size > max_payload_size, do: {:error, :payload_size_exceeded}, else: :ok + end end diff --git a/lib/realtime/tenants/authorization.ex b/lib/realtime/tenants/authorization.ex index da7093f61..2b4338e61 100644 --- a/lib/realtime/tenants/authorization.ex +++ b/lib/realtime/tenants/authorization.ex @@ -17,7 +17,7 @@ defmodule Realtime.Tenants.Authorization do alias Realtime.Database alias Realtime.GenCounter alias Realtime.GenRpc - alias Realtime.Repo + alias Realtime.Tenants.Repo alias Realtime.Tenants.Authorization.Policies defstruct [:tenant_id, :topic, :headers, :jwt, :claims, :role, :sub] diff --git a/lib/realtime/tenants/batch_broadcast.ex b/lib/realtime/tenants/batch_broadcast.ex index 4fc31aa0f..18b5823f1 100644 --- a/lib/realtime/tenants/batch_broadcast.ex +++ b/lib/realtime/tenants/batch_broadcast.ex @@ -29,9 +29,11 @@ defmodule Realtime.Tenants.BatchBroadcast do @spec broadcast( auth_params :: map() | nil, tenant :: Tenant.t(), - messages :: %{messages: list(%{topic: String.t(), payload: map(), event: String.t(), private: boolean()})}, + messages :: %{ + messages: list(%{id: String.t(), topic: String.t(), payload: map(), event: String.t(), private: boolean()}) + }, super_user :: boolean() - ) :: :ok | {:error, 
atom()} + ) :: :ok | {:error, atom() | Ecto.Changeset.t()} def broadcast(auth_params, tenant, messages, super_user \\ false) def broadcast(%Plug.Conn{} = conn, %Tenant{} = tenant, messages, super_user) do @@ -47,7 +49,7 @@ defmodule Realtime.Tenants.BatchBroadcast do end def broadcast(auth_params, %Tenant{} = tenant, messages, super_user) do - with %Ecto.Changeset{valid?: true} = changeset <- changeset(%__MODULE__{}, messages), + with %Ecto.Changeset{valid?: true} = changeset <- changeset(%__MODULE__{}, messages, tenant), %Ecto.Changeset{changes: %{messages: messages}} = changeset, events_per_second_rate = Tenants.events_per_second_rate(tenant), :ok <- check_rate_limit(events_per_second_rate, tenant, length(messages)) do @@ -59,8 +61,8 @@ defmodule Realtime.Tenants.BatchBroadcast do # Handle events for public channel events |> Map.get(false, []) - |> Enum.each(fn %{topic: sub_topic, payload: payload, event: event} -> - send_message_and_count(tenant, events_per_second_rate, sub_topic, event, payload, true) + |> Enum.each(fn message -> + send_message_and_count(tenant, events_per_second_rate, message, true) end) # Handle events for private channel @@ -69,15 +71,11 @@ defmodule Realtime.Tenants.BatchBroadcast do |> Enum.group_by(fn event -> Map.get(event, :topic) end) |> Enum.each(fn {topic, events} -> if super_user do - Enum.each(events, fn %{topic: sub_topic, payload: payload, event: event} -> - send_message_and_count(tenant, events_per_second_rate, sub_topic, event, payload, false) - end) + Enum.each(events, fn message -> send_message_and_count(tenant, events_per_second_rate, message, false) end) else case permissions_for_message(tenant, auth_params, topic) do %Policies{broadcast: %BroadcastPolicies{write: true}} -> - Enum.each(events, fn %{topic: sub_topic, payload: payload, event: event} -> - send_message_and_count(tenant, events_per_second_rate, sub_topic, event, payload, false) - end) + Enum.each(events, fn message -> send_message_and_count(tenant, 
events_per_second_rate, message, false) end) _ -> nil @@ -86,22 +84,26 @@ defmodule Realtime.Tenants.BatchBroadcast do end) :ok + else + %Ecto.Changeset{valid?: false} = changeset -> {:error, changeset} + error -> error end end def broadcast(_, nil, _, _), do: {:error, :tenant_not_found} - def changeset(payload, attrs) do + defp changeset(payload, attrs, tenant) do payload |> cast(attrs, []) - |> cast_embed(:messages, required: true, with: &message_changeset/2) + |> cast_embed(:messages, required: true, with: fn message, attrs -> message_changeset(message, tenant, attrs) end) end - def message_changeset(message, attrs) do + defp message_changeset(message, tenant, attrs) do message - |> cast(attrs, [:topic, :payload, :event, :private]) + |> cast(attrs, [:id, :topic, :payload, :event, :private]) |> maybe_put_private_change() |> validate_required([:topic, :payload, :event]) + |> validate_payload_size(tenant) end defp maybe_put_private_change(changeset) do @@ -111,15 +113,37 @@ defmodule Realtime.Tenants.BatchBroadcast do end end + defp validate_payload_size(changeset, tenant) do + payload = get_change(changeset, :payload) + + case Tenants.validate_payload_size(tenant, payload) do + :ok -> changeset + _ -> add_error(changeset, :payload, "Payload size exceeds tenant limit") + end + end + @event_type "broadcast" - defp send_message_and_count(tenant, events_per_second_rate, topic, event, payload, public?) do - tenant_topic = Tenants.tenant_topic(tenant, topic, public?) - payload = %{"payload" => payload, "event" => event, "type" => "broadcast"} + defp send_message_and_count(tenant, events_per_second_rate, message, public?) do + tenant_topic = Tenants.tenant_topic(tenant, message.topic, public?) 
- broadcast = %Phoenix.Socket.Broadcast{topic: topic, event: @event_type, payload: payload} + payload = %{"payload" => message.payload, "event" => message.event, "type" => "broadcast"} + + payload = + if message[:id], + do: Map.put(payload, "meta", %{"id" => message.id}), + else: payload + + broadcast = %Phoenix.Socket.Broadcast{topic: message.topic, event: @event_type, payload: payload} GenCounter.add(events_per_second_rate.id) - TenantBroadcaster.pubsub_broadcast(tenant.external_id, tenant_topic, broadcast, RealtimeChannel.MessageDispatcher) + + TenantBroadcaster.pubsub_broadcast( + tenant.external_id, + tenant_topic, + broadcast, + RealtimeChannel.MessageDispatcher, + :broadcast + ) end defp permissions_for_message(_, nil, _), do: nil diff --git a/lib/realtime/tenants/cache.ex b/lib/realtime/tenants/cache.ex index aead951a3..cc02e0538 100644 --- a/lib/realtime/tenants/cache.ex +++ b/lib/realtime/tenants/cache.ex @@ -5,6 +5,7 @@ defmodule Realtime.Tenants.Cache do require Cachex.Spec require Logger + alias Realtime.GenRpc alias Realtime.Tenants def child_spec(_) do @@ -16,32 +17,42 @@ defmodule Realtime.Tenants.Cache do } end - def get_tenant_by_external_id(keyword), do: apply_repo_fun(__ENV__.function, [keyword]) + def get_tenant_by_external_id(tenant_id) do + case Cachex.fetch(__MODULE__, cache_key(tenant_id), fn _key -> + case Tenants.get_tenant_by_external_id(tenant_id) do + nil -> {:ignore, nil} + tenant -> {:commit, tenant} + end + end) do + {:commit, value} -> value + {:ok, value} -> value + {:ignore, value} -> value + end + end + + defp cache_key(tenant_id), do: {:get_tenant_by_external_id, tenant_id} @doc """ Invalidates the cache for a tenant in the local node """ - def invalidate_tenant_cache(tenant_id), do: Cachex.del(__MODULE__, {{:get_tenant_by_external_id, 1}, [tenant_id]}) + def invalidate_tenant_cache(tenant_id), do: Cachex.del(__MODULE__, cache_key(tenant_id)) + + def distributed_invalidate_tenant_cache(tenant_id) when is_binary(tenant_id) do + 
GenRpc.multicast(__MODULE__, :invalidate_tenant_cache, [tenant_id]) + end @doc """ - Broadcasts a message to invalidate the tenant cache to all connected nodes + Update the cache for a tenant """ - @spec distributed_invalidate_tenant_cache(String.t()) :: boolean() - def distributed_invalidate_tenant_cache(tenant_id) when is_binary(tenant_id) do - nodes = [Node.self() | Node.list()] - results = :erpc.multicall(nodes, __MODULE__, :invalidate_tenant_cache, [tenant_id], 1000) - - results - |> Enum.map(fn - {res, _} -> - res - - exception -> - Logger.error("Failed to invalidate tenant cache: #{inspect(exception)}") - :error - end) - |> Enum.all?(&(&1 == :ok)) + def update_cache(tenant) do + Cachex.put(__MODULE__, cache_key(tenant.external_id), tenant) end - defp apply_repo_fun(arg1, arg2), do: Realtime.ContextCache.apply_fun(Tenants, arg1, arg2) + @doc """ + Update the cache for a tenant in all nodes + """ + @spec global_cache_update(Realtime.Api.Tenant.t()) :: :ok + def global_cache_update(tenant) do + GenRpc.multicast(__MODULE__, :update_cache, [tenant]) + end end diff --git a/lib/realtime/tenants/connect.ex b/lib/realtime/tenants/connect.ex index b9bf00eb4..ff0a00643 100644 --- a/lib/realtime/tenants/connect.ex +++ b/lib/realtime/tenants/connect.ex @@ -11,16 +11,17 @@ defmodule Realtime.Tenants.Connect do use Realtime.Logs - alias Realtime.Tenants.Rebalancer alias Realtime.Api.Tenant + alias Realtime.GenCounter + alias Realtime.RateCounter alias Realtime.Rpc alias Realtime.Tenants alias Realtime.Tenants.Connect.CheckConnection alias Realtime.Tenants.Connect.GetTenant alias Realtime.Tenants.Connect.Piper alias Realtime.Tenants.Connect.RegisterProcess - alias Realtime.Tenants.Connect.StartCounters alias Realtime.Tenants.Migrations + alias Realtime.Tenants.Rebalancer alias Realtime.Tenants.ReplicationConnection alias Realtime.UsersCounter @@ -37,14 +38,19 @@ defmodule Realtime.Tenants.Connect do connected_users_bucket: [1], check_connect_region_interval: nil + 
@tenant_id_spec [{{:"$1", :_, :_, :_, :_, :_}, [], [:"$1"]}] + @spec list_tenants() :: [binary] + def list_tenants() do + :syn_registry_by_name + |> :syn_backbone.get_table_name(__MODULE__) + |> :ets.select(@tenant_id_spec) + end + @doc "Check if Connect has finished setting up connections" def ready?(tenant_id) do case whereis(tenant_id) do - pid when is_pid(pid) -> - GenServer.call(pid, :ready?) - - _ -> - false + pid when is_pid(pid) -> GenServer.call(pid, :ready?) + _ -> false end end @@ -56,20 +62,35 @@ defmodule Realtime.Tenants.Connect do | {:error, :tenant_database_unavailable} | {:error, :initializing} | {:error, :tenant_database_connection_initializing} + | {:error, :connect_rate_limit_reached} | {:error, :rpc_error, term()} def lookup_or_start_connection(tenant_id, opts \\ []) when is_binary(tenant_id) do - case get_status(tenant_id) do - {:ok, conn} -> - {:ok, conn} + rate_args = Tenants.connect_errors_per_second_rate(tenant_id) + RateCounter.new(rate_args) - {:error, :tenant_database_unavailable} -> - call_external_node(tenant_id, opts) + with {:ok, %{limit: %{triggered: false}}} <- RateCounter.get(rate_args), + {:ok, conn} <- get_status(tenant_id) do + {:ok, conn} + else + {:ok, %{limit: %{triggered: true}}} -> + {:error, :connect_rate_limit_reached} {:error, :tenant_database_connection_initializing} -> - call_external_node(tenant_id, opts) + case call_external_node(tenant_id, opts) do + {:ok, pid} -> + {:ok, pid} + + error -> + GenCounter.add(rate_args.id) + error + end {:error, :initializing} -> {:error, :tenant_database_unavailable} + + {:error, reason} -> + GenCounter.add(rate_args.id) + {:error, reason} end end @@ -81,16 +102,16 @@ defmodule Realtime.Tenants.Connect do | {:error, :tenant_database_unavailable} | {:error, :initializing} | {:error, :tenant_database_connection_initializing} + | {:error, :tenant_db_too_many_connections} def get_status(tenant_id) do case :syn.lookup(__MODULE__, tenant_id) do - {_pid, %{conn: nil}} -> - 
wait_for_connection(tenant_id) + {pid, %{conn: nil}} -> + wait_for_connection(pid, tenant_id) {_, %{conn: conn}} -> {:ok, conn} :undefined -> - Logger.warning("Connection process starting up") {:error, :tenant_database_connection_initializing} error -> @@ -101,7 +122,7 @@ defmodule Realtime.Tenants.Connect do def syn_topic(tenant_id), do: "connect:#{tenant_id}" - defp wait_for_connection(tenant_id) do + defp wait_for_connection(pid, tenant_id) do RealtimeWeb.Endpoint.subscribe(syn_topic(tenant_id)) # We do a lookup after subscribing because we could've missed a message while subscribing @@ -112,9 +133,18 @@ defmodule Realtime.Tenants.Connect do _ -> # Wait for up to 5 seconds for the ready event receive do - %{event: "ready", payload: %{conn: conn}} -> {:ok, conn} + %{event: "ready", payload: %{pid: ^pid, conn: conn}} -> + {:ok, conn} + + %{event: "connect_down", payload: %{pid: ^pid, reason: {:shutdown, :tenant_db_too_many_connections}}} -> + {:error, :tenant_db_too_many_connections} + + %{event: "connect_down", payload: %{pid: ^pid, reason: _reason}} -> + metadata = [external_id: tenant_id, project: tenant_id] + log_error("UnableToConnectToTenantDatabase", "Unable to connect to tenant database", metadata) + {:error, :tenant_database_unavailable} after - 5_000 -> {:error, :initializing} + 15_000 -> {:error, :initializing} end end after @@ -139,16 +169,6 @@ defmodule Realtime.Tenants.Connect do {:error, {:already_started, _}} -> get_status(tenant_id) - {:error, {:shutdown, :tenant_db_too_many_connections}} -> - {:error, :tenant_db_too_many_connections} - - {:error, {:shutdown, :tenant_not_found}} -> - {:error, :tenant_not_found} - - {:error, :shutdown} -> - log_error("UnableToConnectToTenantDatabase", "Unable to connect to tenant database", metadata) - {:error, :tenant_database_unavailable} - {:error, error} -> log_error("UnableToConnectToTenantDatabase", error, metadata) {:error, :tenant_database_unavailable} @@ -209,30 +229,33 @@ defmodule 
Realtime.Tenants.Connect do def init(%{tenant_id: tenant_id} = state) do Logger.metadata(external_id: tenant_id, project: tenant_id) + {:ok, state, {:continue, :db_connect}} + end + + @impl true + def handle_continue(:db_connect, state) do pipes = [ GetTenant, CheckConnection, - StartCounters, RegisterProcess ] case Piper.run(pipes, state) do {:ok, acc} -> - {:ok, acc, {:continue, :run_migrations}} + {:noreply, acc, {:continue, :run_migrations}} {:error, :tenant_not_found} -> - {:stop, {:shutdown, :tenant_not_found}} + {:stop, {:shutdown, :tenant_not_found}, state} {:error, :tenant_db_too_many_connections} -> - {:stop, {:shutdown, :tenant_db_too_many_connections}} + {:stop, {:shutdown, :tenant_db_too_many_connections}, state} {:error, error} -> log_error("UnableToConnectToTenantDatabase", error) - {:stop, :shutdown} + {:stop, :shutdown, state} end end - @impl true def handle_continue(:run_migrations, state) do %{tenant: tenant, db_conn_pid: db_conn_pid} = state Logger.warning("Tenant #{tenant.external_id} is initializing: #{inspect(node())}") @@ -252,31 +275,10 @@ defmodule Realtime.Tenants.Connect do end def handle_continue(:start_replication, state) do - %{tenant: tenant} = state - - with {:ok, replication_connection_pid} <- ReplicationConnection.start(tenant, self()) do - replication_connection_reference = Process.monitor(replication_connection_pid) - - state = %{ - state - | replication_connection_pid: replication_connection_pid, - replication_connection_reference: replication_connection_reference - } - - {:noreply, state, {:continue, :setup_connected_user_events}} - else - {:error, :max_wal_senders_reached} -> - log_error("ReplicationMaxWalSendersReached", "Tenant database has reached the maximum number of WAL senders") - {:stop, :shutdown, state} - - {:error, error} -> - log_error("StartReplicationFailed", error) - {:stop, :shutdown, state} + case start_replication_connection(state) do + {:ok, state} -> {:noreply, state, {:continue, 
:setup_connected_user_events}} + {:error, state} -> {:stop, :shutdown, state} end - rescue - error -> - log_error("StartReplicationFailed", error) - {:stop, :shutdown, state} end def handle_continue(:setup_connected_user_events, state) do @@ -348,13 +350,30 @@ defmodule Realtime.Tenants.Connect do {:stop, :shutdown, state} end + @replication_recovery_backoff 1000 + # Handle replication connection termination def handle_info( {:DOWN, replication_connection_reference, _, _, _}, %{replication_connection_reference: replication_connection_reference} = state ) do - Logger.warning("Replication connection has died") - {:stop, :shutdown, state} + log_warning("ReplicationConnectionDown", "Replication connection has been terminated") + Process.send_after(self(), :recover_replication_connection, @replication_recovery_backoff) + state = %{state | replication_connection_pid: nil, replication_connection_reference: nil} + {:noreply, state} + end + + @replication_connection_query "SELECT 1 from pg_stat_activity where application_name='realtime_replication_connection'" + def handle_info(:recover_replication_connection, state) do + with %{num_rows: 0} <- Postgrex.query!(state.db_conn_pid, @replication_connection_query, []), + {:ok, state} <- start_replication_connection(state) do + {:noreply, state} + else + _ -> + log_error("ReplicationConnectionRecoveryFailed", "Replication connection recovery failed") + Process.send_after(self(), :recover_replication_connection, @replication_recovery_backoff) + {:noreply, state} + end end def handle_info(_, state), do: {:noreply, state} @@ -369,12 +388,12 @@ defmodule Realtime.Tenants.Connect do @impl true def terminate(reason, %{tenant_id: tenant_id}) do Logger.info("Tenant #{tenant_id} has been terminated: #{inspect(reason)}") - Realtime.MetricsCleaner.delete_metric(tenant_id) :ok end ## Private functions defp call_external_node(tenant_id, opts) do + Logger.warning("Connection process starting up") rpc_timeout = Keyword.get(opts, :rpc_timeout, 
@rpc_timeout_default) with tenant <- Tenants.Cache.get_tenant_by_external_id(tenant_id), @@ -413,4 +432,32 @@ defmodule Realtime.Tenants.Connect do defp tenant_suspended?(_), do: :ok defp rebalance_check_interval_in_ms(), do: Application.fetch_env!(:realtime, :rebalance_check_interval_in_ms) + + defp start_replication_connection(state) do + %{tenant: tenant} = state + + with {:ok, replication_connection_pid} <- ReplicationConnection.start(tenant, self()) do + replication_connection_reference = Process.monitor(replication_connection_pid) + + state = %{ + state + | replication_connection_pid: replication_connection_pid, + replication_connection_reference: replication_connection_reference + } + + {:ok, state} + else + {:error, :max_wal_senders_reached} -> + log_error("ReplicationMaxWalSendersReached", "Tenant database has reached the maximum number of WAL senders") + {:error, state} + + {:error, error} -> + log_error("StartReplicationFailed", error) + {:error, state} + end + rescue + error -> + log_error("StartReplicationFailed", error) + {:error, state} + end end diff --git a/lib/realtime/tenants/connect/check_connection.ex b/lib/realtime/tenants/connect/check_connection.ex index 697c08b6c..53cd8e480 100644 --- a/lib/realtime/tenants/connect/check_connection.ex +++ b/lib/realtime/tenants/connect/check_connection.ex @@ -2,16 +2,14 @@ defmodule Realtime.Tenants.Connect.CheckConnection do @moduledoc """ Check tenant database connection. 
""" - alias Realtime.Database @behaviour Realtime.Tenants.Connect.Piper @impl true def run(acc) do %{tenant: tenant} = acc - case Database.check_tenant_connection(tenant) do + case Realtime.Database.check_tenant_connection(tenant) do {:ok, conn} -> - Process.link(conn) db_conn_reference = Process.monitor(conn) {:ok, %{acc | db_conn_pid: conn, db_conn_reference: db_conn_reference}} diff --git a/lib/realtime/tenants/connect/start_counters.ex b/lib/realtime/tenants/connect/start_counters.ex deleted file mode 100644 index f8ce6c378..000000000 --- a/lib/realtime/tenants/connect/start_counters.ex +++ /dev/null @@ -1,60 +0,0 @@ -defmodule Realtime.Tenants.Connect.StartCounters do - @moduledoc """ - Start tenant counters. - """ - - alias Realtime.RateCounter - alias Realtime.Tenants - - @behaviour Realtime.Tenants.Connect.Piper - - @impl true - def run(acc) do - %{tenant: tenant} = acc - - with :ok <- start_joins_per_second_counter(tenant), - :ok <- start_max_events_counter(tenant), - :ok <- start_db_events_counter(tenant) do - {:ok, acc} - end - end - - def start_joins_per_second_counter(tenant) do - res = - tenant - |> Tenants.joins_per_second_rate() - |> RateCounter.new() - - case res do - {:ok, _} -> :ok - {:error, {:already_started, _}} -> :ok - {:error, reason} -> {:error, reason} - end - end - - def start_max_events_counter(tenant) do - res = - tenant - |> Tenants.events_per_second_rate() - |> RateCounter.new() - - case res do - {:ok, _} -> :ok - {:error, {:already_started, _}} -> :ok - {:error, reason} -> {:error, reason} - end - end - - def start_db_events_counter(tenant) do - res = - tenant - |> Tenants.db_events_per_second_rate() - |> RateCounter.new() - - case res do - {:ok, _} -> :ok - {:error, {:already_started, _}} -> :ok - {:error, reason} -> {:error, reason} - end - end -end diff --git a/lib/realtime/tenants/migrations.ex b/lib/realtime/tenants/migrations.ex index 04475c2b7..47f4ad718 100644 --- a/lib/realtime/tenants/migrations.ex +++ 
b/lib/realtime/tenants/migrations.ex @@ -10,6 +10,9 @@ defmodule Realtime.Tenants.Migrations do alias Realtime.Registry.Unique alias Realtime.Repo alias Realtime.Api.Tenant + alias Realtime.Api + alias Realtime.Nodes + alias Realtime.GenRpc alias Realtime.Tenants.Migrations.{ CreateRealtimeSubscriptionTable, @@ -74,7 +77,9 @@ defmodule Realtime.Tenants.Migrations do RealtimeSendSetsTopicConfig, SubscriptionIndexBridgingDisabled, RunSubscriptionIndexBridgingDisabled, - BroadcastSendErrorLogging + BroadcastSendErrorLogging, + CreateMessagesReplayIndex, + BroadcastSendIncludePayloadId } @migrations [ @@ -140,10 +145,12 @@ defmodule Realtime.Tenants.Migrations do {20_250_128_220_012, RealtimeSendSetsTopicConfig}, {20_250_506_224_012, SubscriptionIndexBridgingDisabled}, {20_250_523_164_012, RunSubscriptionIndexBridgingDisabled}, - {20_250_714_121_412, BroadcastSendErrorLogging} + {20_250_714_121_412, BroadcastSendErrorLogging}, + {20_250_905_041_441, CreateMessagesReplayIndex}, + {20_251_103_001_201, BroadcastSendIncludePayloadId} ] - defstruct [:tenant_external_id, :settings] + defstruct [:tenant_external_id, :settings, migrations_ran: 0] @type t :: %__MODULE__{ tenant_external_id: binary(), @@ -155,24 +162,39 @@ defmodule Realtime.Tenants.Migrations do """ @spec run_migrations(Tenant.t()) :: :ok | :noop | {:error, any()} def run_migrations(%Tenant{} = tenant) do - %{extensions: [%{settings: settings} | _]} = tenant - attrs = %__MODULE__{tenant_external_id: tenant.external_id, settings: settings} + if Tenants.run_migrations?(tenant) do + %{extensions: [%{settings: settings} | _]} = tenant - supervisor = - {:via, PartitionSupervisor, {Realtime.Tenants.Migrations.DynamicSupervisor, tenant.external_id}} + attrs = %__MODULE__{ + tenant_external_id: tenant.external_id, + settings: settings, + migrations_ran: tenant.migrations_ran + } - spec = {__MODULE__, attrs} + node = + case Nodes.get_node_for_tenant(tenant) do + {:ok, node, _} -> node + {:error, _} -> node() + end - if 
Tenants.run_migrations?(tenant) do - case DynamicSupervisor.start_child(supervisor, spec) do - :ignore -> :ok - error -> error - end + GenRpc.call(node, __MODULE__, :start_migration, [attrs], tenant_id: tenant.external_id) else :noop end end + def start_migration(attrs) do + supervisor = + {:via, PartitionSupervisor, {Realtime.Tenants.Migrations.DynamicSupervisor, attrs.tenant_external_id}} + + spec = {__MODULE__, attrs} + + case DynamicSupervisor.start_child(supervisor, spec) do + :ignore -> :ok + error -> error + end + end + def start_link(%__MODULE__{tenant_external_id: tenant_external_id} = attrs) do name = {:via, Registry, {Unique, {__MODULE__, :host, tenant_external_id}}} GenServer.start_link(__MODULE__, attrs, name: name) @@ -183,7 +205,7 @@ defmodule Realtime.Tenants.Migrations do case migrate(settings) do :ok -> - Task.Supervisor.async_nolink(__MODULE__.TaskSupervisor, Tenants, :update_migrations_ran, [ + Task.Supervisor.async_nolink(__MODULE__.TaskSupervisor, Api, :update_migrations_ran, [ tenant_external_id, Enum.count(@migrations) ]) diff --git a/lib/realtime/tenants/replication_connection.ex b/lib/realtime/tenants/replication_connection.ex index 45e03c66e..26650ad5e 100644 --- a/lib/realtime/tenants/replication_connection.ex +++ b/lib/realtime/tenants/replication_connection.ex @@ -39,6 +39,7 @@ defmodule Realtime.Tenants.ReplicationConnection do | :check_replication_slot | :create_publication | :check_publication + | :validate_publication | :create_slot | :start_replication_slot | :streaming, @@ -57,7 +58,7 @@ defmodule Realtime.Tenants.ReplicationConnection do publication_name: nil, replication_slot_name: nil, output_plugin: "pgoutput", - proto_version: 1, + proto_version: 2, relations: %{}, buffer: [], monitored_pid: nil, @@ -144,8 +145,8 @@ defmodule Realtime.Tenants.ReplicationConnection do port: connection_opts.port, socket_options: connection_opts.socket_options, ssl: connection_opts.ssl, - backoff_type: :stop, sync_connect: true, + 
auto_reconnect: false, parameters: [application_name: "realtime_replication_connection"] ] @@ -159,6 +160,7 @@ defmodule Realtime.Tenants.ReplicationConnection do @impl true def init(%__MODULE__{tenant_id: tenant_id, monitored_pid: monitored_pid} = state) do + Process.flag(:fullsweep_after, 20) Logger.metadata(external_id: tenant_id, project: tenant_id) Process.monitor(monitored_pid) @@ -221,27 +223,61 @@ defmodule Realtime.Tenants.ReplicationConnection do end def handle_result([%Postgrex.Result{num_rows: 1}], %__MODULE__{step: :create_publication} = state) do - {:query, "SELECT 1", %{state | step: :start_replication_slot}} + %__MODULE__{publication_name: publication_name} = state + + Logger.info("Publication #{publication_name} exists, validating contents") + + query = """ + SELECT schemaname, tablename + FROM pg_publication_tables + WHERE pubname = '#{publication_name}' + """ + + {:query, query, %{state | step: :validate_publication}} end - def handle_result([%Postgrex.Result{}], %__MODULE__{step: :start_replication_slot} = state) do - %__MODULE__{ - proto_version: proto_version, - replication_slot_name: replication_slot_name, - publication_name: publication_name - } = state + def handle_result([%Postgrex.Result{rows: rows}], %__MODULE__{step: :validate_publication} = state) do + %__MODULE__{publication_name: publication_name} = state - Logger.info( - "Starting stream replication for slot #{replication_slot_name} using publication #{publication_name} and protocol version #{proto_version}" - ) + valid_tables = + Enum.all?(rows, fn [schema, table] -> + schema == @schema and (table == @table or String.starts_with?(table, "#{@table}_")) + end) - query = - "START_REPLICATION SLOT #{replication_slot_name} LOGICAL 0/0 (proto_version '#{proto_version}', publication_names '#{publication_name}')" + if valid_tables and rows != [] do + {:query, "SELECT 1", %{state | step: :start_replication_slot}} + else + query = + "DROP PUBLICATION IF EXISTS #{publication_name}; CREATE 
PUBLICATION #{publication_name} FOR TABLE #{@schema}.#{@table}" - {:stream, query, [], %{state | step: :streaming}} + Logger.warning("Publication #{publication_name} contains unexpected tables. Recreating...") + {:query, query, %{state | step: :start_replication_slot}} + end + end + + def handle_result(results, %__MODULE__{step: :start_replication_slot} = state) do + error = Enum.find(results, fn res -> match?(Postgrex.Error, res) end) + + if error do + {:disconnect, "Error starting replication: #{error.message}"} + else + %__MODULE__{ + proto_version: proto_version, + replication_slot_name: replication_slot_name, + publication_name: publication_name + } = state + + Logger.info( + "Starting stream replication for slot #{replication_slot_name} using publication #{publication_name} and protocol version #{proto_version}" + ) + + query = + "START_REPLICATION SLOT #{replication_slot_name} LOGICAL 0/0 (proto_version '#{proto_version}', publication_names '#{publication_name}', binary 'true')" + + {:stream, query, [], %{state | step: :streaming}} + end end - # %Postgrex.Error{message: nil, postgres: %{code: :configuration_limit_exceeded, line: "291", message: "all replication slots are in use", file: "slot.c", unknown: "ERROR", severity: "ERROR", hint: "Free one or increase max_replication_slots.", routine: "ReplicationSlotCreate", pg_code: "53400"}, connection_id: 217538, query: nil} def handle_result(%Postgrex.Error{postgres: %{pg_code: pg_code}}, _state) when pg_code in ~w(53300 53400) do {:disconnect, :max_wal_senders_reached} end @@ -266,8 +302,7 @@ defmodule Realtime.Tenants.ReplicationConnection do def handle_data(data, state) when is_write(data) do %Write{message: message} = parse(data) - message |> decode_message() |> then(&send(self(), &1)) - {:noreply, [], state} + message |> decode_message(state.relations) |> then(&handle_message(&1, state)) end def handle_data(e, state) do @@ -276,17 +311,27 @@ defmodule Realtime.Tenants.ReplicationConnection do end @impl true 
- def handle_info(%Decoder.Messages.Begin{commit_timestamp: commit_timestamp}, state) do + + def handle_info({:DOWN, _, :process, _, _}, _), do: {:disconnect, :shutdown} + def handle_info(_, state), do: {:noreply, state} + + defp handle_message(%Decoder.Messages.Begin{commit_timestamp: commit_timestamp}, state) do latency_committed_at = NaiveDateTime.utc_now() |> NaiveDateTime.diff(commit_timestamp, :millisecond) {:noreply, %{state | latency_committed_at: latency_committed_at}} end - def handle_info(%Decoder.Messages.Relation{} = msg, state) do + defp handle_message(%Decoder.Messages.Relation{} = msg, state) do %Decoder.Messages.Relation{id: id, namespace: namespace, name: name, columns: columns} = msg - %{relations: relations} = state - relation = %{name: name, columns: columns, namespace: namespace} - relations = Map.put(relations, id, relation) - {:noreply, %{state | relations: relations}} + # Only care about relations with namespace=realtime and name starting with messages + if namespace == @schema and String.starts_with?(name, @table) do + %{relations: relations} = state + relation = %{name: name, columns: columns, namespace: namespace} + relations = Map.put(relations, id, relation) + {:noreply, %{state | relations: relations}} + else + Logger.warning("Unexpected relation on schema '#{namespace}' and table '#{name}'") + {:noreply, state} + end rescue e -> log_error("UnableToBroadcastChanges", e) @@ -297,7 +342,7 @@ defmodule Realtime.Tenants.ReplicationConnection do {:noreply, state} end - def handle_info(%Decoder.Messages.Insert{} = msg, state) do + defp handle_message(%Decoder.Messages.Insert{} = msg, state) do %Decoder.Messages.Insert{relation_id: relation_id, tuple_data: tuple_data} = msg %{relations: relations, tenant_id: tenant_id, latency_committed_at: latency_committed_at} = state @@ -310,10 +355,15 @@ defmodule Realtime.Tenants.ReplicationConnection do {:ok, topic} <- get_or_error(to_broadcast, "topic", :topic_missing), {:ok, private} <- 
get_or_error(to_broadcast, "private", :private_missing), %Tenant{} = tenant <- Cache.get_tenant_by_external_id(tenant_id), - broadcast_message = %{topic: topic, event: event, private: private, payload: Map.put_new(payload, "id", id)}, + broadcast_message = %{ + id: id, + topic: topic, + event: event, + private: private, + payload: Jason.Fragment.new(payload) + }, :ok <- BatchBroadcast.broadcast(nil, tenant, %{messages: [broadcast_message]}, true) do - inserted_at = NaiveDateTime.from_iso8601!(inserted_at) - latency_inserted_at = NaiveDateTime.utc_now() |> NaiveDateTime.diff(inserted_at) + latency_inserted_at = NaiveDateTime.utc_now(:microsecond) |> NaiveDateTime.diff(inserted_at, :microsecond) Telemetry.execute( [:realtime, :tenants, :broadcast_from_database], @@ -323,6 +373,11 @@ defmodule Realtime.Tenants.ReplicationConnection do {:noreply, state} else + {:error, %Ecto.Changeset{valid?: false} = changeset} -> + error = Ecto.Changeset.traverse_errors(changeset, &elem(&1, 0)) + log_error("UnableToBroadcastChanges", error) + {:noreply, state} + {:error, error} -> log_error("UnableToBroadcastChanges", error) {:noreply, state} @@ -340,9 +395,7 @@ defmodule Realtime.Tenants.ReplicationConnection do {:noreply, state} end - def handle_info({:DOWN, _, :process, _, _}, _), do: {:disconnect, :shutdown} - def handle_info(_, state), do: {:noreply, state} - + defp handle_message(_, state), do: {:noreply, state} @impl true def handle_disconnect(state) do Logger.warning("Disconnecting broadcast changes handler in the step : #{inspect(state.step)}") @@ -370,8 +423,7 @@ defmodule Realtime.Tenants.ReplicationConnection do |> Enum.zip(columns) |> Map.new(fn {nil, %{name: name}} -> {name, nil} - {value, %{name: name, type: "jsonb"}} -> {name, Jason.decode!(value)} - {value, %{name: name, type: "bool"}} -> {name, value == "t"} + {value, %{name: name, type: "bool"}} -> {name, value} {value, %{name: name}} -> {name, value} end) end diff --git a/lib/realtime/tenants/repo.ex 
b/lib/realtime/tenants/repo.ex new file mode 100644 index 000000000..18c9c893f --- /dev/null +++ b/lib/realtime/tenants/repo.ex @@ -0,0 +1,253 @@ +defmodule Realtime.Tenants.Repo do + @moduledoc """ + Database operations done against the tenant database + """ + use Realtime.Logs + import Ecto.Query + alias Realtime.Repo.Replica + + @doc """ + Lists all records for a given query and converts them into a given struct + """ + @spec all(DBConnection.conn(), Ecto.Queryable.t(), module(), [Postgrex.execute_option()]) :: + {:ok, list(struct())} | {:error, any()} + def all(conn, query, result_struct, opts \\ []) do + conn + |> run_all_query(query, opts) + |> result_to_structs(result_struct) + end + + @doc """ + Fetches one record for a given query and converts it into a given struct + """ + @spec one( + DBConnection.conn(), + Ecto.Query.t(), + module(), + Postgrex.option() | Keyword.t() + ) :: + {:error, any()} | {:ok, struct()} | Ecto.Changeset.t() + def one(conn, query, result_struct, opts \\ []) do + conn + |> run_all_query(query, opts) + |> result_to_single_struct(result_struct, nil) + end + + @doc """ + Inserts a given changeset into the database and converts the result into a given struct + """ + @spec insert( + DBConnection.conn(), + Ecto.Changeset.t(), + module(), + Postgrex.option() | Keyword.t() + ) :: + {:ok, struct()} | {:error, any()} | Ecto.Changeset.t() + def insert(conn, changeset, result_struct, opts \\ []) do + with {:ok, {query, args}} <- insert_query_from_changeset(changeset) do + conn + |> run_query_with_trap(query, args, opts) + |> result_to_single_struct(result_struct, changeset) + end + end + + @doc """ + Inserts all changesets into the database and converts the result into a given list of structs + """ + @spec insert_all_entries( + DBConnection.conn(), + [Ecto.Changeset.t()], + module(), + Postgrex.option() | Keyword.t() + ) :: + {:ok, [struct()]} | {:error, any()} | Ecto.Changeset.t() + def insert_all_entries(conn, changesets, result_struct, opts 
\\ []) do + with {:ok, {query, args}} <- insert_all_query_from_changeset(changesets) do + conn + |> run_query_with_trap(query, args, opts) + |> result_to_structs(result_struct) + end + end + + @doc """ + Deletes records for a given query and returns the number of deleted records + """ + @spec del(DBConnection.conn(), Ecto.Queryable.t()) :: + {:ok, non_neg_integer()} | {:error, any()} + def del(conn, query) do + with {:ok, %Postgrex.Result{num_rows: num_rows}} <- run_delete_query(conn, query) do + {:ok, num_rows} + end + end + + @doc """ + Updates an entry based on the changeset and returns the updated entry + """ + @spec update(DBConnection.conn(), Ecto.Changeset.t(), module()) :: + {:ok, struct()} | {:error, any()} | Ecto.Changeset.t() + def update(conn, changeset, result_struct, opts \\ []) do + with {:ok, {query, args}} <- update_query_from_changeset(changeset) do + conn + |> run_query_with_trap(query, args, opts) + |> result_to_single_struct(result_struct, changeset) + end + end + + defp result_to_single_struct( + {:error, %Postgrex.Error{postgres: %{code: :unique_violation, constraint: "channels_name_index"}}}, + _struct, + changeset + ) do + Ecto.Changeset.add_error(changeset, :name, "has already been taken") + end + + defp result_to_single_struct({:error, _} = error, _, _), do: error + + defp result_to_single_struct({:ok, %Postgrex.Result{rows: []}}, _, _) do + {:error, :not_found} + end + + defp result_to_single_struct({:ok, %Postgrex.Result{rows: [row], columns: columns}}, struct, _) do + repo_module = Replica.replica() + {:ok, repo_module.load(struct, Enum.zip(columns, row))} + end + + defp result_to_single_struct({:ok, %Postgrex.Result{num_rows: num_rows}}, _, _) do + raise("expected at most one result but got #{num_rows} in result") + end + + defp result_to_structs({:error, _} = error, _), do: error + + defp result_to_structs({:ok, %Postgrex.Result{rows: rows, columns: columns}}, struct) do + repo_module = Replica.replica() + {:ok, Enum.map(rows, 
&repo_module.load(struct, Enum.zip(columns, &1)))} + end + + defp insert_query_from_changeset(%{valid?: false} = changeset), do: {:error, changeset} + + defp insert_query_from_changeset(changeset) do + schema = changeset.data.__struct__ + source = schema.__schema__(:source) + prefix = schema.__schema__(:prefix) + acc = %{header: [], rows: []} + + %{header: header, rows: rows} = + Enum.reduce(changeset.changes, acc, fn {field, row}, %{header: header, rows: rows} -> + row = + case row do + row when is_boolean(row) -> row + row when is_atom(row) -> Atom.to_string(row) + _ -> row + end + + %{ + header: [Atom.to_string(field) | header], + rows: [row | rows] + } + end) + + table = "\"#{prefix}\".\"#{source}\"" + header = "(#{Enum.map_join(header, ",", &"\"#{&1}\"")})" + + arg_index = + rows + |> Enum.with_index(1) + |> Enum.map_join(",", fn {_, index} -> "$#{index}" end) + + {:ok, {"INSERT INTO #{table} #{header} VALUES (#{arg_index}) RETURNING *", rows}} + end + + defp insert_all_query_from_changeset(changesets) do + invalid = Enum.filter(changesets, &(!&1.valid?)) + + if invalid != [] do + {:error, changesets} + else + [schema] = changesets |> Enum.map(& &1.data.__struct__) |> Enum.uniq() + + source = schema.__schema__(:source) + prefix = schema.__schema__(:prefix) + changes = Enum.map(changesets, & &1.changes) + + %{header: header, rows: rows} = + Enum.reduce(changes, %{header: [], rows: []}, fn v, changes_acc -> + Enum.reduce(v, changes_acc, fn {field, row}, %{header: header, rows: rows} -> + row = + case row do + row when is_boolean(row) -> row + row when is_atom(row) -> Atom.to_string(row) + _ -> row + end + + %{ + header: Enum.uniq([Atom.to_string(field) | header]), + rows: [row | rows] + } + end) + end) + + args_index = + rows + |> Enum.chunk_every(length(header)) + |> Enum.reduce({"", 1}, fn row, {acc, count} -> + arg_index = + row + |> Enum.with_index(count) + |> Enum.map_join("", fn {_, index} -> "$#{index}," end) + |> String.trim_trailing(",") + |> 
then(&"(#{&1})") + + {"#{acc},#{arg_index}", count + length(row)} + end) + |> elem(0) + |> String.trim_leading(",") + + table = "\"#{prefix}\".\"#{source}\"" + header = "(#{Enum.map_join(header, ",", &"\"#{&1}\"")})" + {:ok, {"INSERT INTO #{table} #{header} VALUES #{args_index} RETURNING *", rows}} + end + end + + defp update_query_from_changeset(%{valid?: false} = changeset), do: {:error, changeset} + + defp update_query_from_changeset(changeset) do + repo_module = Replica.replica() + %Ecto.Changeset{data: %{id: id, __struct__: struct}, changes: changes} = changeset + changes = Keyword.new(changes) + query = from(c in struct, where: c.id == ^id, select: c, update: [set: ^changes]) + {:ok, repo_module.to_sql(:update_all, query)} + end + + defp run_all_query(conn, query, opts) do + repo_module = Replica.replica() + {query, args} = repo_module.to_sql(:all, query) + run_query_with_trap(conn, query, args, opts) + end + + defp run_delete_query(conn, query) do + repo_module = Replica.replica() + {query, args} = repo_module.to_sql(:delete_all, query) + run_query_with_trap(conn, query, args) + end + + defp run_query_with_trap(conn, query, args, opts \\ []) do + Postgrex.query(conn, query, args, opts) + rescue + e -> + log_error("ErrorRunningQuery", e) + {:error, :postgrex_exception} + catch + :exit, {:noproc, {DBConnection.Holder, :checkout, _}} -> + log_error( + "UnableCheckoutConnection", + "Unable to checkout connection, please check your connection pool configuration" + ) + + {:error, :postgrex_exception} + + :exit, reason -> + log_error("UnknownError", reason) + + {:error, :postgrex_exception} + end +end diff --git a/lib/realtime/tenants/repo/migrations/20250905041441_create_messages_replay_index.ex b/lib/realtime/tenants/repo/migrations/20250905041441_create_messages_replay_index.ex new file mode 100644 index 000000000..77afde6e0 --- /dev/null +++ b/lib/realtime/tenants/repo/migrations/20250905041441_create_messages_replay_index.ex @@ -0,0 +1,11 @@ +defmodule 
Realtime.Tenants.Migrations.CreateMessagesReplayIndex do + @moduledoc false + + use Ecto.Migration + + def change do + create_if_not_exists index(:messages, [{:desc, :inserted_at}, :topic], + where: "extension = 'broadcast' and private IS TRUE" + ) + end +end diff --git a/lib/realtime/tenants/repo/migrations/20251103001201_broadcast_send_include_payload_id.ex b/lib/realtime/tenants/repo/migrations/20251103001201_broadcast_send_include_payload_id.ex new file mode 100644 index 000000000..ba526d9e6 --- /dev/null +++ b/lib/realtime/tenants/repo/migrations/20251103001201_broadcast_send_include_payload_id.ex @@ -0,0 +1,41 @@ +defmodule Realtime.Tenants.Migrations.BroadcastSendIncludePayloadId do + @moduledoc false + use Ecto.Migration + + # Include ID in the payload if not defined + def change do + execute(""" + CREATE OR REPLACE FUNCTION realtime.send(payload jsonb, event text, topic text, private boolean DEFAULT true ) RETURNS void + AS $$ + DECLARE + generated_id uuid; + final_payload jsonb; + BEGIN + BEGIN + -- Generate a new UUID for the id + generated_id := gen_random_uuid(); + + -- Check if payload has an 'id' key, if not, add the generated UUID + IF payload ? 
'id' THEN + final_payload := payload; + ELSE + final_payload := jsonb_set(payload, '{id}', to_jsonb(generated_id)); + END IF; + + -- Set the topic configuration + EXECUTE format('SET LOCAL realtime.topic TO %L', topic); + + -- Attempt to insert the message + INSERT INTO realtime.messages (id, payload, event, topic, private, extension) + VALUES (generated_id, final_payload, event, topic, private, 'broadcast'); + EXCEPTION + WHEN OTHERS THEN + -- Capture and notify the error + RAISE WARNING 'ErrorSendingBroadcastMessage: %', SQLERRM; + END; + END; + $$ + LANGUAGE plpgsql; + """) + end +end diff --git a/lib/realtime/user_counter.ex b/lib/realtime/user_counter.ex index 6190030d9..b6f85a920 100644 --- a/lib/realtime/user_counter.ex +++ b/lib/realtime/user_counter.ex @@ -8,17 +8,89 @@ defmodule Realtime.UsersCounter do Adds a RealtimeChannel pid to the `:users` scope for a tenant so we can keep track of all connected clients for a tenant. """ @spec add(pid(), String.t()) :: :ok - def add(pid, tenant), do: :syn.join(:users, tenant, pid) + def add(pid, tenant_id) when is_pid(pid) and is_binary(tenant_id) do + beacon_join(pid, tenant_id) + tenant_id |> scope() |> :syn.join(tenant_id, pid) + end + + defp beacon_join(pid, tenant_id) do + :ok = Beacon.join(:users, tenant_id, pid) + rescue + _ -> Logger.error("Failed to join Beacon users scope for tenant #{tenant_id}") + end @doc """ Returns the count of all connected clients for a tenant for the cluster. """ @spec tenant_users(String.t()) :: non_neg_integer() - def tenant_users(tenant), do: :syn.member_count(:users, tenant) + def tenant_users(tenant_id), do: tenant_id |> scope() |> :syn.member_count(tenant_id) @doc """ Returns the count of all connected clients for a tenant for a single node. 
""" @spec tenant_users(atom, String.t()) :: non_neg_integer() - def tenant_users(node_name, tenant), do: :syn.member_count(:users, tenant, node_name) + def tenant_users(node_name, tenant_id), do: tenant_id |> scope() |> :syn.member_count(tenant_id, node_name) + + @count_all_nodes_spec [ + { + # Match the tuple structure, capture group_name + {{:"$1", :_}, :_, :_, :_, :_}, + # No guards + [], + # Return only the group_name + [:"$1"] + } + ] + + @doc """ + Returns the counts of all connected clients for all tenants for the cluster. + """ + @spec tenant_counts() :: %{String.t() => non_neg_integer()} + def tenant_counts() do + scopes() + |> Stream.flat_map(fn scope -> + :syn_backbone.get_table_name(:syn_pg_by_name, scope) + |> :ets.select(@count_all_nodes_spec) + end) + |> Enum.frequencies() + end + + @doc """ + Returns the counts of all connected clients for all tenants for a single node. + """ + @spec tenant_counts(node) :: %{String.t() => non_neg_integer()} + def tenant_counts(node) do + count_single_node_spec = [ + { + # Match the tuple structure with specific node, capture group_name + {{:"$1", :_}, :_, :_, :_, node}, + # No guards + [], + # Return only the group_name + [:"$1"] + } + ] + + scopes() + |> Stream.flat_map(fn scope -> + :syn_backbone.get_table_name(:syn_pg_by_name, scope) + |> :ets.select(count_single_node_spec) + end) + |> Enum.frequencies() + end + + @doc """ + Returns the scope for a given tenant id. 
+ """ + @spec scope(String.t()) :: atom() + def scope(tenant_id) do + shards = Application.fetch_env!(:realtime, :users_scope_shards) + shard = :erlang.phash2(tenant_id, shards) + :"users_#{shard}" + end + + def scopes() do + shards = Application.fetch_env!(:realtime, :users_scope_shards) + Enum.map(0..(shards - 1), fn shard -> :"users_#{shard}" end) + end end diff --git a/lib/realtime_web/channels/auth/channels_authorization.ex b/lib/realtime_web/channels/auth/channels_authorization.ex index 56c574f34..b5eeacc2f 100644 --- a/lib/realtime_web/channels/auth/channels_authorization.ex +++ b/lib/realtime_web/channels/auth/channels_authorization.ex @@ -20,10 +20,10 @@ defmodule RealtimeWeb.ChannelsAuthorization do def authorize_conn(token, jwt_secret, jwt_jwks) do case authorize(token, jwt_secret, jwt_jwks) do {:ok, claims} -> - required = MapSet.new(["role", "exp"]) - claims_keys = claims |> Map.keys() |> MapSet.new() + required = ["role", "exp"] + claims_keys = Map.keys(claims) - if MapSet.subset?(required, claims_keys), + if Enum.all?(required, &(&1 in claims_keys)), do: {:ok, claims}, else: {:error, :missing_claims} diff --git a/lib/realtime_web/channels/payloads/broadcast.ex b/lib/realtime_web/channels/payloads/broadcast.ex index 7feddb043..e2881fd54 100644 --- a/lib/realtime_web/channels/payloads/broadcast.ex +++ b/lib/realtime_web/channels/payloads/broadcast.ex @@ -9,9 +9,11 @@ defmodule RealtimeWeb.Channels.Payloads.Broadcast do embedded_schema do field :ack, :boolean, default: false field :self, :boolean, default: false + embeds_one :replay, RealtimeWeb.Channels.Payloads.Broadcast.Replay end def changeset(broadcast, attrs) do cast(broadcast, attrs, [:ack, :self], message: &Join.error_message/2) + |> cast_embed(:replay, invalid_message: "unable to parse, expected a map") end end diff --git a/lib/realtime_web/channels/payloads/broadcast/replay.ex b/lib/realtime_web/channels/payloads/broadcast/replay.ex new file mode 100644 index 000000000..b0a5804a2 --- /dev/null 
+++ b/lib/realtime_web/channels/payloads/broadcast/replay.ex @@ -0,0 +1,17 @@ +defmodule RealtimeWeb.Channels.Payloads.Broadcast.Replay do + @moduledoc """ + Validate broadcast replay field of the join payload. + """ + use Ecto.Schema + import Ecto.Changeset + alias RealtimeWeb.Channels.Payloads.Join + + embedded_schema do + field :limit, :integer, default: 10 + field :since, :integer, default: 0 + end + + def changeset(broadcast, attrs) do + cast(broadcast, attrs, [:limit, :since], message: &Join.error_message/2) + end +end diff --git a/lib/realtime_web/channels/payloads/config.ex b/lib/realtime_web/channels/payloads/config.ex index 923020174..029aa93b5 100644 --- a/lib/realtime_web/channels/payloads/config.ex +++ b/lib/realtime_web/channels/payloads/config.ex @@ -17,6 +17,14 @@ defmodule RealtimeWeb.Channels.Payloads.Config do end def changeset(config, attrs) do + attrs = + attrs + |> Enum.map(fn + {k, v} when is_list(v) -> {k, Enum.filter(v, fn v -> v != nil end)} + {k, v} -> {k, v} + end) + |> Map.new() + config |> cast(attrs, [:private], message: &Join.error_message/2) |> cast_embed(:broadcast, invalid_message: "unable to parse, expected a map") diff --git a/lib/realtime_web/channels/payloads/presence.ex b/lib/realtime_web/channels/payloads/presence.ex index 53e09047d..785df9222 100644 --- a/lib/realtime_web/channels/payloads/presence.ex +++ b/lib/realtime_web/channels/payloads/presence.ex @@ -8,7 +8,7 @@ defmodule RealtimeWeb.Channels.Payloads.Presence do embedded_schema do field :enabled, :boolean, default: true - field :key, :string, default: UUID.uuid1() + field :key, :any, default: UUID.uuid1(), virtual: true end def changeset(presence, attrs) do diff --git a/lib/realtime_web/channels/presence.ex b/lib/realtime_web/channels/presence.ex index f4d378b92..9e173febe 100644 --- a/lib/realtime_web/channels/presence.ex +++ b/lib/realtime_web/channels/presence.ex @@ -8,5 +8,6 @@ defmodule RealtimeWeb.Presence do use Phoenix.Presence, otp_app: :realtime, 
pubsub_server: Realtime.PubSub, + dispatcher: RealtimeWeb.RealtimeChannel.MessageDispatcher, pool_size: 10 end diff --git a/lib/realtime_web/channels/realtime_channel.ex b/lib/realtime_web/channels/realtime_channel.ex index 26c033f5c..70a426357 100644 --- a/lib/realtime_web/channels/realtime_channel.ex +++ b/lib/realtime_web/channels/realtime_channel.ex @@ -18,7 +18,6 @@ defmodule RealtimeWeb.RealtimeChannel do alias Realtime.Tenants.Authorization alias Realtime.Tenants.Authorization.Policies alias Realtime.Tenants.Authorization.Policies.BroadcastPolicies - alias Realtime.Tenants.Authorization.Policies.PresencePolicies alias Realtime.Tenants.Connect alias RealtimeWeb.Channels.Payloads.Join @@ -29,6 +28,7 @@ defmodule RealtimeWeb.RealtimeChannel do alias RealtimeWeb.RealtimeChannel.Tracker @confirm_token_ms_interval :timer.minutes(5) + @fullsweep_after Application.compile_env!(:realtime, :websocket_fullsweep_after) @impl true def join("realtime:", _params, socket) do @@ -43,6 +43,8 @@ defmodule RealtimeWeb.RealtimeChannel do transport_pid: transport_pid } = socket + Process.flag(:max_heap_size, max_heap_size()) + Process.flag(:fullsweep_after, @fullsweep_after) Tracker.track(socket.transport_pid) Logger.metadata(external_id: tenant_id, project: tenant_id) Logger.put_process_level(self(), log_level) @@ -72,12 +74,21 @@ defmodule RealtimeWeb.RealtimeChannel do {:ok, claims, confirm_token_ref} <- confirm_token(socket), socket = assign_authorization_context(socket, sub_topic, claims), {:ok, db_conn} <- Connect.lookup_or_start_connection(tenant_id), - {:ok, socket} <- maybe_assign_policies(sub_topic, db_conn, socket) do + {:ok, socket} <- maybe_assign_policies(sub_topic, db_conn, socket), + {:ok, replayed_message_ids} <- + maybe_replay_messages(params["config"], sub_topic, db_conn, tenant_id, socket.assigns.private?) do tenant_topic = Tenants.tenant_topic(tenant_id, sub_topic, !socket.assigns.private?) 
# fastlane subscription metadata = - MessageDispatcher.fastlane_metadata(transport_pid, serializer, topic, socket.assigns.log_level, tenant_id) + MessageDispatcher.fastlane_metadata( + transport_pid, + serializer, + topic, + log_level, + tenant_id, + replayed_message_ids + ) RealtimeWeb.Endpoint.subscribe(tenant_topic, metadata: metadata) @@ -158,6 +169,10 @@ defmodule RealtimeWeb.RealtimeChannel do msg = "Database can't accept more connections, Realtime won't connect" log_error(socket, "DatabaseLackOfConnections", msg) + {:error, :connect_rate_limit_reached} -> + msg = "Too many database connections attempts per second" + log_error(socket, "DatabaseConnectionRateLimitReached", msg) + {:error, :unable_to_set_policies, error} -> log_error(socket, "UnableToSetPolicies", error) {:error, %{reason: "Realtime was unable to connect to the project database"}} @@ -198,6 +213,15 @@ defmodule RealtimeWeb.RealtimeChannel do {:error, :shutdown_in_progress} -> log_error(socket, "RealtimeRestarting", "Realtime is restarting, please standby") + {:error, :failed_to_replay_messages} -> + log_error(socket, "UnableToReplayMessages", "Realtime was unable to replay messages") + + {:error, :invalid_replay_params} -> + log_error(socket, "UnableToReplayMessages", "Replay params are not valid") + + {:error, :invalid_replay_channel} -> + log_error(socket, "UnableToReplayMessages", "Replay is not allowed for public channels") + {:error, error} -> log_error(socket, "UnknownErrorOnChannel", error) {:error, %{reason: "Unknown Error on Channel"}} @@ -205,6 +229,17 @@ defmodule RealtimeWeb.RealtimeChannel do end @impl true + def handle_info({:replay, messages}, socket) do + for message <- messages do + meta = %{"replayed" => true, "id" => message.id} + payload = %{"payload" => message.payload, "event" => message.event, "type" => "broadcast", "meta" => meta} + + push(socket, "broadcast", payload) + end + + {:noreply, socket} + end + def handle_info(:update_rate_counter, socket) do count(socket) @@ 
-226,27 +261,11 @@ defmodule RealtimeWeb.RealtimeChannel do {:noreply, assign(socket, %{pg_sub_ref: pg_sub_ref})} end - def handle_info( - %{event: "presence_diff"}, - %{assigns: %{policies: %Policies{presence: %PresencePolicies{read: false}}}} = socket - ) do - Logger.warning("Presence message ignored") - {:noreply, socket} - end - def handle_info(_msg, %{assigns: %{policies: %Policies{broadcast: %BroadcastPolicies{read: false}}}} = socket) do Logger.warning("Broadcast message ignored") {:noreply, socket} end - def handle_info(%{event: "presence_diff", payload: payload} = msg, socket) do - %{presence_rate_counter: presence_rate_counter} = socket.assigns - GenCounter.add(presence_rate_counter.id) - maybe_log_info(socket, msg) - push(socket, "presence_diff", payload) - {:noreply, socket} - end - def handle_info(%{event: type, payload: payload} = msg, socket) do count(socket) maybe_log_info(socket, msg) @@ -271,13 +290,19 @@ defmodule RealtimeWeb.RealtimeChannel do case PostgresCdc.connect(module, args) do {:ok, response} -> - case PostgresCdc.after_connect(module, response, postgres_extension, pg_change_params) do + case PostgresCdc.after_connect(module, response, postgres_extension, pg_change_params, tenant) do {:ok, _response} -> message = "Subscribed to PostgreSQL" maybe_log_info(socket, message) push_system_message("postgres_changes", socket, "ok", message, channel_name) {:noreply, assign(socket, :pg_sub_ref, nil)} + {:error, {reason, error}} when reason in [:malformed_subscription_params, :subscription_insert_failed] -> + maybe_log_warning(socket, "RealtimeDisabledForConfiguration", error) + push_system_message("postgres_changes", socket, "error", error, channel_name) + # No point in retrying if the params are invalid + {:noreply, assign(socket, :pg_sub_ref, nil)} + error -> maybe_log_warning(socket, "RealtimeDisabledForConfiguration", error) @@ -369,6 +394,9 @@ defmodule RealtimeWeb.RealtimeChannel do {:error, :rate_limit_exceeded} -> shutdown_response(socket, 
"Too many presence messages per second") + {:error, :payload_size_exceeded} -> + shutdown_response(socket, "Track message size exceeded") + {:error, error} -> log_error(socket, "UnableToHandlePresence", error) {:reply, :error, socket} @@ -376,12 +404,15 @@ defmodule RealtimeWeb.RealtimeChannel do end def handle_in("presence", payload, %{assigns: %{private?: false}} = socket) do - with {:ok, socket} <- PresenceHandler.handle(payload, socket) do + with {:ok, socket} <- PresenceHandler.handle(payload, nil, socket) do {:reply, :ok, socket} else {:error, :rate_limit_exceeded} -> shutdown_response(socket, "Too many presence messages per second") + {:error, :payload_size_exceeded} -> + shutdown_response(socket, "Track message size exceeded") + {:error, error} -> log_error(socket, "UnableToHandlePresence", error) {:reply, :error, socket} @@ -696,7 +727,7 @@ defmodule RealtimeWeb.RealtimeChannel do end) subscription_metadata = - {:subscriber_fastlane, transport_pid, serializer, ids, topic, tenant, is_new_api} + {:subscriber_fastlane, transport_pid, serializer, ids, topic, is_new_api} metadata = [metadata: subscription_metadata] @@ -762,4 +793,34 @@ defmodule RealtimeWeb.RealtimeChannel do do: {:error, :private_only}, else: :ok end + + defp maybe_replay_messages(%{"broadcast" => %{"replay" => _}}, _sub_topic, _db_conn, _tenant_id, false = _private?) do + {:error, :invalid_replay_channel} + end + + defp maybe_replay_messages( + %{"broadcast" => %{"replay" => replay_params}}, + sub_topic, + db_conn, + tenant_id, + true = _private? 
+ ) + when is_map(replay_params) do + with {:ok, messages, message_ids} <- + Realtime.Messages.replay( + db_conn, + tenant_id, + sub_topic, + replay_params["since"], + replay_params["limit"] || 25 + ) do + # Send to self because we can't write to the socket before finishing the join process + send(self(), {:replay, messages}) + {:ok, message_ids} + end + end + + defp maybe_replay_messages(_, _, _, _, _), do: {:ok, MapSet.new()} + + defp max_heap_size(), do: Application.fetch_env!(:realtime, :websocket_max_heap_size) end diff --git a/lib/realtime_web/channels/realtime_channel/broadcast_handler.ex b/lib/realtime_web/channels/realtime_channel/broadcast_handler.ex index f8e736c2e..a89355316 100644 --- a/lib/realtime_web/channels/realtime_channel/broadcast_handler.ex +++ b/lib/realtime_web/channels/realtime_channel/broadcast_handler.ex @@ -6,6 +6,7 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandler do import Phoenix.Socket, only: [assign: 3] + alias Realtime.Tenants alias RealtimeWeb.RealtimeChannel alias RealtimeWeb.TenantBroadcaster alias Phoenix.Socket @@ -14,11 +15,13 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandler do alias Realtime.Tenants.Authorization.Policies alias Realtime.Tenants.Authorization.Policies.BroadcastPolicies + @type payload :: map | {String.t(), :json | :binary, binary} + @event_type "broadcast" - @spec handle(map(), Socket.t()) :: {:reply, :ok, Socket.t()} | {:noreply, Socket.t()} + @spec handle(payload, Socket.t()) :: {:reply, :ok, Socket.t()} | {:noreply, Socket.t()} def handle(payload, %{assigns: %{private?: false}} = socket), do: handle(payload, nil, socket) - @spec handle(map(), pid() | nil, Socket.t()) :: {:reply, :ok, Socket.t()} | {:noreply, Socket.t()} + @spec handle(payload, pid() | nil, Socket.t()) :: {:reply, :ok, Socket.t()} | {:noreply, Socket.t()} def handle(payload, db_conn, %{assigns: %{private?: true}} = socket) do %{ assigns: %{ @@ -38,8 +41,23 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandler do |> 
increment_rate_counter() %{ack_broadcast: ack_broadcast} = socket.assigns - send_message(tenant_id, self_broadcast, tenant_topic, payload) - if ack_broadcast, do: {:reply, :ok, socket}, else: {:noreply, socket} + + res = + case Tenants.validate_payload_size(tenant_id, payload) do + :ok -> send_message(tenant_id, self_broadcast, tenant_topic, payload) + {:error, error} -> {:error, error} + end + + cond do + ack_broadcast && match?({:error, :payload_size_exceeded}, res) -> + {:reply, {:error, :payload_size_exceeded}, socket} + + ack_broadcast -> + {:reply, :ok, socket} + + true -> + {:noreply, socket} + end {:ok, policies} -> {:noreply, assign(socket, :policies, policies)} @@ -65,29 +83,66 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandler do } = socket socket = increment_rate_counter(socket) - send_message(tenant_id, self_broadcast, tenant_topic, payload) - if ack_broadcast, - do: {:reply, :ok, socket}, - else: {:noreply, socket} + res = + case Tenants.validate_payload_size(tenant_id, payload) do + :ok -> send_message(tenant_id, self_broadcast, tenant_topic, payload) + {:error, error} -> {:error, error} + end + + cond do + ack_broadcast && match?({:error, :payload_size_exceeded}, res) -> + {:reply, {:error, :payload_size_exceeded}, socket} + + ack_broadcast -> + {:reply, :ok, socket} + + true -> + {:noreply, socket} + end end defp send_message(tenant_id, self_broadcast, tenant_topic, payload) do - broadcast = %Phoenix.Socket.Broadcast{topic: tenant_topic, event: @event_type, payload: payload} + broadcast = build_broadcast(tenant_topic, payload) if self_broadcast do - TenantBroadcaster.pubsub_broadcast(tenant_id, tenant_topic, broadcast, RealtimeChannel.MessageDispatcher) + TenantBroadcaster.pubsub_broadcast( + tenant_id, + tenant_topic, + broadcast, + RealtimeChannel.MessageDispatcher, + :broadcast + ) else TenantBroadcaster.pubsub_broadcast_from( tenant_id, self(), tenant_topic, broadcast, - RealtimeChannel.MessageDispatcher + 
RealtimeChannel.MessageDispatcher, + :broadcast ) end end + # No idea why Dialyzer is complaining here + @dialyzer {:nowarn_function, build_broadcast: 2} + + # Message payload was built by V2 Serializer which was originally UserBroadcastPush + # We are not using the metadata for anything just yet. + defp build_broadcast(topic, {user_event, user_payload_encoding, user_payload, _metadata}) do + %RealtimeWeb.Socket.UserBroadcast{ + topic: topic, + user_event: user_event, + user_payload_encoding: user_payload_encoding, + user_payload: user_payload + } + end + + defp build_broadcast(topic, payload) do + %Phoenix.Socket.Broadcast{topic: topic, event: @event_type, payload: payload} + end + defp increment_rate_counter(%{assigns: %{policies: %Policies{broadcast: %BroadcastPolicies{write: false}}}} = socket) do socket end diff --git a/lib/realtime_web/channels/realtime_channel/logging.ex b/lib/realtime_web/channels/realtime_channel/logging.ex index 296dce1bc..2f6c91fdb 100644 --- a/lib/realtime_web/channels/realtime_channel/logging.ex +++ b/lib/realtime_web/channels/realtime_channel/logging.ex @@ -21,7 +21,7 @@ defmodule RealtimeWeb.RealtimeChannel.Logging do def log_error(socket, code, msg) do msg = build_msg(code, msg) emit_system_error(:error, code) - log(socket, :error, msg) + log(socket, :error, code, msg) {:error, %{reason: msg}} end @@ -32,7 +32,7 @@ defmodule RealtimeWeb.RealtimeChannel.Logging do {:error, %{reason: binary}} def log_warning(socket, code, msg) do msg = build_msg(code, msg) - log(socket, :warning, msg) + log(socket, :warning, code, msg) {:error, %{reason: msg}} end @@ -59,16 +59,16 @@ defmodule RealtimeWeb.RealtimeChannel.Logging do if code, do: "#{code}: #{msg}", else: msg end - defp log(%{assigns: %{tenant: tenant, access_token: access_token}}, level, msg) do + defp log(%{assigns: %{tenant: tenant, access_token: access_token}}, level, code, msg) do Logger.metadata(external_id: tenant, project: tenant) if level in [:error, :warning], do: 
update_metadata_with_token_claims(access_token) - Logger.log(level, msg) + Logger.log(level, msg, error_code: code) end defp maybe_log(%{assigns: %{log_level: log_level}} = socket, level, code, msg) do msg = build_msg(code, msg) emit_system_error(level, code) - if Logger.compare_levels(log_level, level) != :gt, do: log(socket, level, msg) + if Logger.compare_levels(log_level, level) != :gt, do: log(socket, level, code, msg) if level in [:error, :warning], do: {:error, %{reason: msg}}, else: :ok end diff --git a/lib/realtime_web/channels/realtime_channel/message_dispatcher.ex b/lib/realtime_web/channels/realtime_channel/message_dispatcher.ex index b5db97f95..ebcdb93a9 100644 --- a/lib/realtime_web/channels/realtime_channel/message_dispatcher.ex +++ b/lib/realtime_web/channels/realtime_channel/message_dispatcher.ex @@ -4,41 +4,66 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcher do """ require Logger + alias Phoenix.Socket.Broadcast + alias RealtimeWeb.Socket.UserBroadcast - def fastlane_metadata(fastlane_pid, serializer, topic, :info, tenant_id) do - {:realtime_channel_fastlane, fastlane_pid, serializer, topic, {:log, tenant_id}} + def fastlane_metadata(fastlane_pid, serializer, topic, log_level, tenant_id, replayed_message_ids \\ MapSet.new()) do + {:rc_fastlane, fastlane_pid, serializer, topic, log_level, tenant_id, replayed_message_ids} end - def fastlane_metadata(fastlane_pid, serializer, topic, _log_level, _tenant_id) do - {:realtime_channel_fastlane, fastlane_pid, serializer, topic} - end + @presence_diff "presence_diff" @doc """ This dispatch function caches encoded messages if fastlane is used It also sends an :update_rate_counter to the subscriber and it can conditionally log + + fastlane_pid is the actual socket transport pid """ - @spec dispatch(list, pid, Phoenix.Socket.Broadcast.t()) :: :ok - def dispatch(subscribers, from, %Phoenix.Socket.Broadcast{} = msg) do - # fastlane_pid is the actual socket transport pid - # This reduce caches the 
serialization and bypasses the channel process going straight to the - # transport process + @spec dispatch(list, pid, Broadcast.t() | UserBroadcast.t()) :: :ok + def dispatch(subscribers, from, %Broadcast{event: @presence_diff} = msg) do + {_cache, count} = + Enum.reduce(subscribers, {%{}, 0}, fn + {pid, _}, {cache, count} when pid == from -> + {cache, count} + + {_pid, {:rc_fastlane, fastlane_pid, serializer, join_topic, log_level, tenant_id, _replayed_message_ids}}, + {cache, count} -> + maybe_log(log_level, join_topic, msg, tenant_id) + + cache = do_dispatch(msg, fastlane_pid, serializer, join_topic, cache, tenant_id, log_level) + {cache, count + 1} + + {pid, _}, {cache, count} -> + send(pid, msg) + {cache, count} + end) + + tenant_id = tenant_id(subscribers) + increment_presence_counter(tenant_id, msg.event, count) + + :ok + end + + def dispatch(subscribers, from, msg) do + message_id = message_id(msg) - # Credo doesn't like that we don't use the result aggregation _ = Enum.reduce(subscribers, %{}, fn {pid, _}, cache when pid == from -> cache - {pid, {:realtime_channel_fastlane, fastlane_pid, serializer, join_topic}}, cache -> - send(pid, :update_rate_counter) - do_dispatch(msg, fastlane_pid, serializer, join_topic, cache) + {pid, {:rc_fastlane, fastlane_pid, serializer, join_topic, log_level, tenant_id, replayed_message_ids}}, + cache -> + if already_replayed?(message_id, replayed_message_ids) do + # skip already replayed message + cache + else + send(pid, :update_rate_counter) - {pid, {:realtime_channel_fastlane, fastlane_pid, serializer, join_topic, {:log, tenant_id}}}, cache -> - send(pid, :update_rate_counter) - log = "Received message on #{join_topic} with payload: #{inspect(msg, pretty: true)}" - Logger.info(log, external_id: tenant_id, project: tenant_id) + maybe_log(log_level, join_topic, msg, tenant_id) - do_dispatch(msg, fastlane_pid, serializer, join_topic, cache) + do_dispatch(msg, fastlane_pid, serializer, join_topic, cache, tenant_id, log_level) 
+ end {pid, _}, cache -> send(pid, msg) @@ -48,18 +73,70 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcher do :ok end - defp do_dispatch(msg, fastlane_pid, serializer, join_topic, cache) do + defp maybe_log(:info, join_topic, msg, tenant_id) when is_struct(msg) do + log = "Received message on #{join_topic} with payload: #{inspect(msg, pretty: true)}" + Logger.info(log, external_id: tenant_id, project: tenant_id) + end + + defp maybe_log(:info, join_topic, msg, tenant_id) when is_binary(msg) do + log = "Received message on #{join_topic}. #{msg}" + Logger.info(log, external_id: tenant_id, project: tenant_id) + end + + defp maybe_log(_level, _join_topic, _msg, _tenant_id), do: :ok + + defp do_dispatch(msg, fastlane_pid, serializer, join_topic, cache, tenant_id, log_level) do case cache do - %{^serializer => encoded_msg} -> + %{^serializer => {:ok, encoded_msg}} -> send(fastlane_pid, encoded_msg) cache + %{^serializer => {:error, _reason}} -> + # We do nothing at this stage. It has been already logged depending on the log level + cache + %{} -> # Use the original topic that was joined without the external_id msg = %{msg | topic: join_topic} - encoded_msg = serializer.fastlane!(msg) - send(fastlane_pid, encoded_msg) - Map.put(cache, serializer, encoded_msg) + + result = + case fastlane!(serializer, msg) do + {:ok, encoded_msg} -> + send(fastlane_pid, encoded_msg) + {:ok, encoded_msg} + + {:error, reason} -> + maybe_log(log_level, join_topic, reason, tenant_id) + {:error, reason} + end + + Map.put(cache, serializer, result) + end + end + + # We have to convert because V1 does not know how to process UserBroadcast + defp fastlane!(Phoenix.Socket.V1.JSONSerializer = serializer, %UserBroadcast{} = msg) do + with {:ok, msg} <- UserBroadcast.convert_to_json_broadcast(msg) do + {:ok, serializer.fastlane!(msg)} end end + + defp fastlane!(serializer, msg), do: {:ok, serializer.fastlane!(msg)} + + defp tenant_id([{_pid, {:rc_fastlane, _, _, _, _, tenant_id, _}} | _]), 
do: tenant_id + defp tenant_id(_), do: nil + + defp increment_presence_counter(tenant_id, "presence_diff", count) when is_binary(tenant_id) do + tenant_id + |> Realtime.Tenants.presence_events_per_second_key() + |> Realtime.GenCounter.add(count) + end + + defp increment_presence_counter(_tenant_id, _event, _count), do: :ok + + defp message_id(%Broadcast{payload: %{"meta" => %{"id" => id}}}), do: id + defp message_id(_), do: nil + + defp already_replayed?(nil, _replayed_message_ids), do: false + defp already_replayed?(message_id, replayed_message_ids), do: MapSet.member?(replayed_message_ids, message_id) end diff --git a/lib/realtime_web/channels/realtime_channel/presence_handler.ex b/lib/realtime_web/channels/realtime_channel/presence_handler.ex index 00ce77c02..733cc838d 100644 --- a/lib/realtime_web/channels/realtime_channel/presence_handler.ex +++ b/lib/realtime_web/channels/realtime_channel/presence_handler.ex @@ -52,28 +52,27 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandler do end end - @spec handle(map(), Socket.t()) :: - {:ok, Socket.t()} | {:error, :rls_policy_error | :unable_to_set_policies | :rate_limit_exceeded} - def handle(_, %{assigns: %{presence_enabled?: false}} = socket), do: {:ok, socket} - def handle(payload, socket) when not is_private?(socket), do: handle(payload, nil, socket) - @spec handle(map(), pid() | nil, Socket.t()) :: {:ok, Socket.t()} - | {:error, :rls_policy_error | :unable_to_set_policies | :rate_limit_exceeded | :unable_to_track_presence} - def handle(_, _, %{assigns: %{presence_enabled?: false}} = socket), do: {:ok, socket} - + | {:error, + :rls_policy_error + | :unable_to_set_policies + | :rate_limit_exceeded + | :unable_to_track_presence + | :payload_size_exceeded} def handle(%{"event" => event} = payload, db_conn, socket) do event = String.downcase(event, :ascii) handle_presence_event(event, payload, db_conn, socket) end - def handle(_payload, _db_conn, socket), do: {:ok, socket} + def handle(_, _, socket), do: {:ok, 
socket} - defp handle_presence_event("track", payload, _db_conn, socket) when not is_private?(socket) do + defp handle_presence_event("track", payload, _, socket) when not is_private?(socket) do track(socket, payload) end - defp handle_presence_event("track", payload, db_conn, socket) when is_nil(socket.assigns.policies.presence.write) do + defp handle_presence_event("track", payload, db_conn, socket) + when is_private?(socket) and is_nil(socket.assigns.policies.presence.write) do %{assigns: %{authorization_context: authorization_context, policies: policies}} = socket case Authorization.get_write_authorizations(policies, db_conn, authorization_context) do @@ -102,7 +101,7 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandler do defp handle_presence_event("untrack", _, _, socket) do %{assigns: %{presence_key: presence_key, tenant_topic: tenant_topic}} = socket :ok = Presence.untrack(self(), tenant_topic, presence_key) - {:ok, socket} + {:ok, assign(socket, :presence_track_payload, nil)} end defp handle_presence_event(event, _, _, _) do @@ -114,25 +113,53 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandler do %{assigns: %{presence_key: presence_key, tenant_topic: tenant_topic}} = socket payload = Map.get(payload, "payload", %{}) - with :ok <- limit_presence_event(socket), + with :ok <- check_track_payload(socket.assigns, payload), + tenant <- Tenants.Cache.get_tenant_by_external_id(socket.assigns.tenant), + :ok <- validate_payload_size(tenant, payload), + _ <- RealtimeWeb.TenantBroadcaster.collect_payload_size(socket.assigns.tenant, payload, :presence), + :ok <- limit_presence_event(socket), {:ok, _} <- Presence.track(self(), tenant_topic, presence_key, payload) do + socket = + socket + |> assign(:presence_enabled?, true) + |> assign(:presence_track_payload, payload) + {:ok, socket} else + {:error, :no_payload_change} -> + # no-op if payload hasn't changed + {:ok, socket} + {:error, {:already_tracked, pid, _, _}} -> case Presence.update(pid, tenant_topic, 
presence_key, payload) do - {:ok, _} -> {:ok, socket} - {:error, _} -> {:error, :unable_to_track_presence} + {:ok, _} -> + socket = assign(socket, :presence_track_payload, payload) + {:ok, socket} + + {:error, _} -> + {:error, :unable_to_track_presence} end {:error, :rate_limit_exceeded} -> {:error, :rate_limit_exceeded} + {:error, :payload_size_exceeded} -> + {:error, :payload_size_exceeded} + {:error, error} -> log_error("UnableToTrackPresence", error) {:error, :unable_to_track_presence} end end + defp check_track_payload(assigns, new_payload) do + if assigns[:presence_track_payload] != new_payload do + :ok + else + {:error, :no_payload_change} + end + end + defp presence_dirty_list(topic) do [{:pool_size, size}] = :ets.lookup(Presence, :pool_size) @@ -143,10 +170,9 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandler do end defp limit_presence_event(socket) do - %{assigns: %{presence_rate_counter: presence_counter, tenant: tenant_id}} = socket + %{assigns: %{presence_rate_counter: presence_counter, tenant: _tenant_id}} = socket {:ok, rate_counter} = RateCounter.get(presence_counter) - - tenant = Tenants.Cache.get_tenant_by_external_id(tenant_id) + tenant = Tenants.Cache.get_tenant_by_external_id(socket.assigns.tenant) if rate_counter.avg > tenant.max_presence_events_per_second do {:error, :rate_limit_exceeded} @@ -155,4 +181,6 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandler do :ok end end + + defp validate_payload_size(tenant, payload), do: Tenants.validate_payload_size(tenant, payload) end diff --git a/lib/realtime_web/channels/tenant_rate_limiters.ex b/lib/realtime_web/channels/tenant_rate_limiters.ex new file mode 100644 index 000000000..2101ac945 --- /dev/null +++ b/lib/realtime_web/channels/tenant_rate_limiters.ex @@ -0,0 +1,43 @@ +defmodule RealtimeWeb.TenantRateLimiters do + @moduledoc """ + Rate limiters for tenants. 
+ """ + require Logger + alias Realtime.UsersCounter + alias Realtime.Tenants + alias Realtime.RateCounter + alias Realtime.Api.Tenant + + @spec check_tenant(Realtime.Api.Tenant.t()) :: :ok | {:error, :too_many_connections | :too_many_joins} + def check_tenant(tenant) do + with :ok <- max_concurrent_users_check(tenant) do + max_joins_per_second_check(tenant) + end + end + + defp max_concurrent_users_check(%Tenant{max_concurrent_users: max_conn_users, external_id: external_id}) do + total_conn_users = UsersCounter.tenant_users(external_id) + + if total_conn_users < max_conn_users, + do: :ok, + else: {:error, :too_many_connections} + end + + defp max_joins_per_second_check(%Tenant{max_joins_per_second: max_joins_per_second} = tenant) do + rate_args = Tenants.joins_per_second_rate(tenant.external_id, max_joins_per_second) + + RateCounter.new(rate_args) + + case RateCounter.get(rate_args) do + {:ok, %{limit: %{triggered: false}}} -> + :ok + + {:ok, %{limit: %{triggered: true}}} -> + {:error, :too_many_joins} + + error -> + Logger.error("UnknownErrorOnCounter: #{inspect(error)}") + {:error, error} + end + end +end diff --git a/lib/realtime_web/channels/user_socket.ex b/lib/realtime_web/channels/user_socket.ex index 09dd15906..0966c3cd7 100644 --- a/lib/realtime_web/channels/user_socket.ex +++ b/lib/realtime_web/channels/user_socket.ex @@ -1,4 +1,28 @@ defmodule RealtimeWeb.UserSocket do + # This is defined up here before `use Phoenix.Socket` is called so that we can define `Phoenix.Socket.init/1` + # It has to be overridden because we need to set the `max_heap_size` flag from the transport process context + @impl true + def init(state) when is_tuple(state) do + Process.flag(:max_heap_size, max_heap_size()) + Process.send_after(self(), {:measure_traffic, 0, 0}, measure_traffic_interval_in_ms()) + Phoenix.Socket.__init__(state) + end + + @impl true + def handle_info( + {:measure_traffic, previous_recv, previous_send}, + {_, %{assigns: assigns, transport_pid: 
transport_pid}} = state + ) do + tenant_external_id = Map.get(assigns, :tenant) + + %{latest_recv: latest_recv, latest_send: latest_send} = + collect_traffic_telemetry(transport_pid, tenant_external_id, previous_recv, previous_send) + + Process.send_after(self(), {:measure_traffic, latest_recv, latest_send}, measure_traffic_interval_in_ms()) + + {:ok, state} + end + use Phoenix.Socket use Realtime.Logs @@ -8,6 +32,7 @@ defmodule RealtimeWeb.UserSocket do alias Realtime.PostgresCdc alias Realtime.Tenants + alias RealtimeWeb.TenantRateLimiters alias RealtimeWeb.ChannelsAuthorization alias RealtimeWeb.RealtimeChannel alias RealtimeWeb.RealtimeChannel.Logging @@ -48,6 +73,7 @@ defmodule RealtimeWeb.UserSocket do token when is_binary(token) <- token, jwt_secret_dec <- Crypto.decrypt!(jwt_secret), {:ok, claims} <- ChannelsAuthorization.authorize_conn(token, jwt_secret_dec, jwt_jwks), + :ok <- TenantRateLimiters.check_tenant(tenant), {:ok, postgres_cdc_module} <- PostgresCdc.driver(postgres_cdc_default) do %Tenant{ extensions: extensions, @@ -103,6 +129,16 @@ defmodule RealtimeWeb.UserSocket do log_error("MalformedJWT", "The token provided is not a valid JWT") {:error, :token_malformed} + {:error, :too_many_connections} -> + msg = "Too many connected users" + Logging.log_error(socket, "ConnectionRateLimitReached", msg) + {:error, :too_many_connections} + + {:error, :too_many_joins} -> + msg = "Too many joins per second" + Logging.log_error(socket, "JoinsRateLimitReached", msg) + {:error, :too_many_joins} + error -> log_error("ErrorConnectingToWebsocket", error) error @@ -122,4 +158,40 @@ defmodule RealtimeWeb.UserSocket do _ -> @default_log_level end end + + defp max_heap_size(), do: Application.fetch_env!(:realtime, :websocket_max_heap_size) + defp measure_traffic_interval_in_ms(), do: Application.fetch_env!(:realtime, :measure_traffic_interval_in_ms) + + defp collect_traffic_telemetry(nil, _tenant_external_id, _previous_recv, _previous_send), do: 0 + + defp 
collect_traffic_telemetry(transport_pid, tenant_external_id, previous_recv, previous_send) do + %{send_oct: latest_send, recv_oct: latest_recv} = + transport_pid + |> Process.info(:links) + |> then(fn {:links, links} -> links end) + |> Enum.filter(&is_port/1) + |> Enum.reduce(%{send_oct: 0, recv_oct: 0}, fn link, acc -> + case :inet.getstat(link, [:send_oct, :recv_oct]) do + {:ok, stats} -> + send_oct = Keyword.get(stats, :send_oct, 0) + recv_oct = Keyword.get(stats, :recv_oct, 0) + + %{ + send_oct: acc.send_oct + send_oct, + recv_oct: acc.recv_oct + recv_oct + } + + {:error, _} -> + acc + end + end) + + send_delta = max(0, latest_send - previous_send) + recv_delta = max(0, latest_recv - previous_recv) + + :telemetry.execute([:realtime, :channel, :output_bytes], %{size: send_delta}, %{tenant: tenant_external_id}) + :telemetry.execute([:realtime, :channel, :input_bytes], %{size: recv_delta}, %{tenant: tenant_external_id}) + + %{latest_recv: latest_recv, latest_send: latest_send} + end end diff --git a/lib/realtime_web/controllers/fallback_controller.ex b/lib/realtime_web/controllers/fallback_controller.ex index d83d1d681..f379a5aa9 100644 --- a/lib/realtime_web/controllers/fallback_controller.ex +++ b/lib/realtime_web/controllers/fallback_controller.ex @@ -29,13 +29,6 @@ defmodule RealtimeWeb.FallbackController do |> render("error.json", changeset: changeset) end - def call(conn, {:error, _}) do - conn - |> put_status(:unauthorized) - |> put_view(RealtimeWeb.ErrorView) - |> render("error.json", message: "Unauthorized") - end - def call(conn, {:error, status, message}) when is_atom(status) and is_binary(message) do log_error("UnprocessableEntity", message) @@ -45,7 +38,7 @@ defmodule RealtimeWeb.FallbackController do |> render("error.json", message: message) end - def call(conn, %Ecto.Changeset{valid?: true} = changeset) do + def call(conn, {:error, %Ecto.Changeset{valid?: false} = changeset}) do log_error( "UnprocessableEntity", 
Ecto.Changeset.traverse_errors(changeset, &translate_error/1) @@ -57,6 +50,13 @@ defmodule RealtimeWeb.FallbackController do |> render("error.json", changeset: changeset) end + def call(conn, {:error, _}) do + conn + |> put_status(:unauthorized) + |> put_view(RealtimeWeb.ErrorView) + |> render("error.json", message: "Unauthorized") + end + def call(conn, %Ecto.Changeset{valid?: false} = changeset) do log_error( "UnprocessableEntity", diff --git a/lib/realtime_web/controllers/metrics_controller.ex b/lib/realtime_web/controllers/metrics_controller.ex index 19509e21b..61b6fd613 100644 --- a/lib/realtime_web/controllers/metrics_controller.ex +++ b/lib/realtime_web/controllers/metrics_controller.ex @@ -4,39 +4,69 @@ defmodule RealtimeWeb.MetricsController do alias Realtime.PromEx alias Realtime.GenRpc + # We give more memory and time to collect metrics from all nodes as this is a lot of work def index(conn, _) do - timeout = Application.fetch_env!(:realtime, :metrics_rpc_timeout) + conn = + conn + |> put_resp_content_type("text/plain") + |> send_chunked(200) + + {time, conn} = :timer.tc(fn -> metrics([Node.self() | Node.list()], conn) end, :millisecond) + Logger.info("Collected cluster metrics in #{time} milliseconds") + + conn + end + + def region(conn, %{"region" => region}) do + conn = + conn + |> put_resp_content_type("text/plain") + |> send_chunked(200) - cluster_metrics = - Node.list() - |> Task.async_stream( - fn node -> - {node, GenRpc.call(node, PromEx, :get_compressed_metrics, [], timeout: timeout)} - end, - timeout: :infinity - ) - |> Enum.reduce(PromEx.get_metrics(), fn {_, {node, response}}, acc -> - case response do - {:error, :rpc_error, reason} -> - Logger.error("Cannot fetch metrics from the node #{inspect(node)} because #{inspect(reason)}") - acc - - metrics -> - acc <> uncompress(metrics) - end - end) + nodes = Realtime.Nodes.region_nodes(region) + + {time, conn} = :timer.tc(fn -> metrics(nodes, conn) end, :millisecond) + Logger.info("Collected 
metrics for region #{region} in #{time} milliseconds") conn - |> put_resp_content_type("text/plain") - |> send_resp(200, cluster_metrics) end - defp uncompress(compressed_data) do - :zlib.uncompress(compressed_data) - rescue - error -> - Logger.error("Failed to decompress metrics data: #{inspect(error)}") - # Return empty string to not impact the aggregated metrics - "" + defp metrics(nodes, conn) do + bump_max_heap_size() + timeout = Application.fetch_env!(:realtime, :metrics_rpc_timeout) + + nodes + |> Task.async_stream( + fn node -> + {node, GenRpc.call(node, __MODULE__, :get_metrics, [], timeout: timeout)} + end, + timeout: :infinity + ) + |> Enum.reduce(conn, fn {_, {node, response}}, acc_conn -> + case response do + {:error, :rpc_error, reason} -> + Logger.error("Cannot fetch metrics from the node #{inspect(node)} because #{inspect(reason)}") + acc_conn + + metrics -> + {:ok, acc_conn} = chunk(acc_conn, metrics) + :erlang.garbage_collect() + acc_conn + end + end) + end + + def get_metrics() do + bump_max_heap_size() + PromEx.get_metrics() + end + + defp bump_max_heap_size() do + system_max_heap_size = :erlang.system_info(:max_heap_size)[:size] + + # it's 0 when there is no limit + if is_integer(system_max_heap_size) and system_max_heap_size > 0 do + Process.flag(:max_heap_size, system_max_heap_size * 3) + end end end diff --git a/lib/realtime_web/controllers/tenant_controller.ex b/lib/realtime_web/controllers/tenant_controller.ex index 4beb6f209..5444eee69 100644 --- a/lib/realtime_web/controllers/tenant_controller.ex +++ b/lib/realtime_web/controllers/tenant_controller.ex @@ -137,7 +137,7 @@ defmodule RealtimeWeb.TenantController do ) def update(conn, %{"tenant_id" => external_id, "tenant" => tenant_params}) do - tenant = Api.get_tenant_by_external_id(external_id) + tenant = Api.get_tenant_by_external_id(external_id, use_replica?: false) case tenant do nil -> @@ -160,7 +160,7 @@ defmodule RealtimeWeb.TenantController do end tenant -> - with {:ok, %Tenant{} = 
tenant} <- Api.update_tenant(tenant, tenant_params) do + with {:ok, %Tenant{} = tenant} <- Api.update_tenant_by_external_id(tenant.external_id, tenant_params) do conn |> put_status(:ok) |> put_resp_header("location", Routes.tenant_path(conn, :show, tenant)) @@ -192,10 +192,10 @@ defmodule RealtimeWeb.TenantController do def delete(conn, %{"tenant_id" => tenant_id}) do stop_all_timeout = Enum.count(PostgresCdc.available_drivers()) * 1_000 - with %Tenant{} = tenant <- Api.get_tenant_by_external_id(tenant_id, :primary), + with %Tenant{} = tenant <- Api.get_tenant_by_external_id(tenant_id, use_replica: false), _ <- Tenants.suspend_tenant_by_external_id(tenant_id), true <- Api.delete_tenant_by_external_id(tenant_id), - true <- Cache.distributed_invalidate_tenant_cache(tenant_id), + :ok <- Cache.distributed_invalidate_tenant_cache(tenant_id), :ok <- PostgresCdc.stop_all(tenant, stop_all_timeout), :ok <- Database.replication_slot_teardown(tenant) do send_resp(conn, 204, "") @@ -231,7 +231,7 @@ defmodule RealtimeWeb.TenantController do ) def reload(conn, %{"tenant_id" => tenant_id}) do - case Tenants.get_tenant_by_external_id(tenant_id) do + case Api.get_tenant_by_external_id(tenant_id, use_replica?: false) do nil -> log_error("TenantNotFound", "Tenant not found") diff --git a/lib/realtime_web/endpoint.ex b/lib/realtime_web/endpoint.ex index 917ab65b9..ac8e58054 100644 --- a/lib/realtime_web/endpoint.ex +++ b/lib/realtime_web/endpoint.ex @@ -11,14 +11,25 @@ defmodule RealtimeWeb.Endpoint do signing_salt: "5OUq5X4H" ] + @fullsweep_after Application.compile_env!(:realtime, :websocket_fullsweep_after) + socket "/socket", RealtimeWeb.UserSocket, websocket: [ connect_info: [:peer_data, :uri, :x_headers], - fullsweep_after: 20, - max_frame_size: 8_000_000, + fullsweep_after: @fullsweep_after, + max_frame_size: 5_000_000, + # https://github.com/ninenines/cowboy/blob/24d32de931a0c985ff7939077463fc8be939f0e9/doc/src/manual/cowboy_websocket.asciidoc#L228 + # active_n: The number of 
packets Cowboy will request from the socket at once. + # This can be used to tweak the performance of the server. Higher values reduce + # the number of times Cowboy need to request more packets from the port driver at + # the expense of potentially higher memory being used. + active_n: 100, + # Skip validating UTF8 for faster frame processing. + # Currently all text frames are handled only with JSON which already requires UTF-8 + validate_utf8: false, serializer: [ {Phoenix.Socket.V1.JSONSerializer, "~> 1.0.0"}, - {Phoenix.Socket.V2.JSONSerializer, "~> 2.0.0"} + {RealtimeWeb.Socket.V2Serializer, "~> 2.0.0"} ] ], longpoll: [ diff --git a/lib/realtime_web/live/status_live/index.ex b/lib/realtime_web/live/status_live/index.ex index 8a2d32054..f55eddfa5 100644 --- a/lib/realtime_web/live/status_live/index.ex +++ b/lib/realtime_web/live/status_live/index.ex @@ -3,11 +3,18 @@ defmodule RealtimeWeb.StatusLive.Index do alias Realtime.Latency.Payload alias Realtime.Nodes + alias RealtimeWeb.Endpoint @impl true def mount(_params, _session, socket) do - if connected?(socket), do: RealtimeWeb.Endpoint.subscribe("admin:cluster") - {:ok, assign(socket, pings: default_pings(), nodes: Enum.count(all_nodes()))} + if connected?(socket), do: Endpoint.subscribe("admin:cluster") + + socket = + socket + |> assign(nodes: Enum.count(all_nodes())) + |> stream(:pings, default_pings()) + + {:ok, socket} end @impl true @@ -17,17 +24,14 @@ defmodule RealtimeWeb.StatusLive.Index do @impl true def handle_info(%Phoenix.Socket.Broadcast{payload: %Payload{} = payload}, socket) do - pair = payload.from_node <> "_" <> payload.node - payload = %{pair => payload} - - pings = Map.merge(socket.assigns.pings, payload) + pair = pair_id(payload.from_node, payload.node) - {:noreply, assign(socket, pings: pings)} + {:noreply, stream(socket, :pings, [%{id: pair, payload: payload}])} end defp apply_action(socket, :index, _params) do socket - |> assign(:page_title, "Status - Supabase Realtime") + |> 
assign(:page_title, "Realtime Status") end defp all_nodes do @@ -35,9 +39,14 @@ defmodule RealtimeWeb.StatusLive.Index do end defp default_pings do - for n <- all_nodes(), f <- all_nodes(), into: %{} do - pair = n <> "_" <> f - {pair, %Payload{from_node: f, latency: "Loading...", node: n, timestamp: "Loading..."}} + for n <- all_nodes(), f <- all_nodes() do + pair = pair_id(f, n) + + %{id: pair, payload: %Payload{from_node: f, latency: "Loading...", node: n, timestamp: "Loading..."}} end end + + defp pair_id(from, to) do + from <> "_" <> to + end end diff --git a/lib/realtime_web/live/status_live/index.html.heex b/lib/realtime_web/live/status_live/index.html.heex index 645001714..63ea4fc0d 100644 --- a/lib/realtime_web/live/status_live/index.html.heex +++ b/lib/realtime_web/live/status_live/index.html.heex @@ -1,16 +1,16 @@ <.h1>Supabase Realtime: Multiplayer Edition + <.h2>Cluster Status +

Understand the latency between nodes across the Realtime cluster.

-
- <%= for {_pair, p} <- @pings do %> -
-
From: <%= p.from_region %> - <%= p.from_node %>
-
To: <%= p.region %> - <%= p.node %>
-
<%= p.latency %> ms
-
<%= p.timestamp %>
-
- <% end %> +
+
+
From: <%= p.payload.from_region %> - <%= p.payload.from_node %>
+
To: <%= p.payload.region %> - <%= p.payload.node %>
+
<%= p.payload.latency %> ms
+
<%= p.payload.timestamp %>
+
diff --git a/lib/realtime_web/plugs/assign_tenant.ex b/lib/realtime_web/plugs/assign_tenant.ex index 69b52e8ab..b60d3e28a 100644 --- a/lib/realtime_web/plugs/assign_tenant.ex +++ b/lib/realtime_web/plugs/assign_tenant.ex @@ -20,7 +20,7 @@ defmodule RealtimeWeb.Plugs.AssignTenant do def call(%Plug.Conn{host: host} = conn, _opts) do with {:ok, external_id} <- Database.get_external_id(host), - %Tenant{} = tenant <- Api.get_tenant_by_external_id(external_id) do + %Tenant{} = tenant <- Api.get_tenant_by_external_id(external_id, use_replica?: true) do Logger.metadata(external_id: external_id, project: external_id) OpenTelemetry.Tracer.set_attributes(external_id: external_id) diff --git a/lib/realtime_web/plugs/auth_tenant.ex b/lib/realtime_web/plugs/auth_tenant.ex index 11bf2e0bc..23c0581a8 100644 --- a/lib/realtime_web/plugs/auth_tenant.ex +++ b/lib/realtime_web/plugs/auth_tenant.ex @@ -42,6 +42,9 @@ defmodule RealtimeWeb.AuthTenant do [] -> nil + [""] -> + nil + [value | _] -> [bearer, token] = value |> String.split(" ") bearer = String.downcase(bearer) diff --git a/lib/realtime_web/router.ex b/lib/realtime_web/router.ex index 1e368f6d2..77aded263 100644 --- a/lib/realtime_web/router.ex +++ b/lib/realtime_web/router.ex @@ -76,6 +76,7 @@ defmodule RealtimeWeb.Router do pipe_through(:metrics) get("/", MetricsController, :index) + get("/:region", MetricsController, :region) end scope "/api" do diff --git a/lib/realtime_web/socket/user_broadcast.ex b/lib/realtime_web/socket/user_broadcast.ex new file mode 100644 index 000000000..7caba33ce --- /dev/null +++ b/lib/realtime_web/socket/user_broadcast.ex @@ -0,0 +1,39 @@ +defmodule RealtimeWeb.Socket.UserBroadcast do + @moduledoc """ + Defines a message sent from pubsub to channels and vice-versa. 
+ + The message format requires the following keys: + + * `:topic` - The string topic or topic:subtopic pair namespace, for example "messages", "messages:123" + * `:user_event`- The string user event name, for example "my-event" + * `:user_payload_encoding`- :json or :binary + * `:user_payload` - The actual message payload + + Optionally metadata which is a map to be JSON encoded + """ + + alias Phoenix.Socket.Broadcast + + @type t :: %__MODULE__{} + defstruct topic: nil, user_event: nil, user_payload: nil, user_payload_encoding: nil, metadata: nil + + @spec convert_to_json_broadcast(t) :: {:ok, Broadcast.t()} | {:error, String.t()} + def convert_to_json_broadcast(%__MODULE__{user_payload_encoding: :json} = user_broadcast) do + payload = %{ + "event" => user_broadcast.user_event, + "payload" => Jason.Fragment.new(user_broadcast.user_payload), + "type" => "broadcast" + } + + payload = + if user_broadcast.metadata do + Map.put(payload, "meta", user_broadcast.metadata) + else + payload + end + + {:ok, %Broadcast{event: "broadcast", payload: payload, topic: user_broadcast.topic}} + end + + def convert_to_json_broadcast(%__MODULE__{}), do: {:error, "User payload encoding is not JSON"} +end diff --git a/lib/realtime_web/socket/v2_serializer.ex b/lib/realtime_web/socket/v2_serializer.ex new file mode 100644 index 000000000..ff50dab5d --- /dev/null +++ b/lib/realtime_web/socket/v2_serializer.ex @@ -0,0 +1,232 @@ +defmodule RealtimeWeb.Socket.V2Serializer do + @moduledoc """ + Custom serializer that is a superset of Phoenix's V2 JSONSerializer + that handles user broadcast and user broadcast push + """ + + @behaviour Phoenix.Socket.Serializer + + @push 0 + @reply 1 + @broadcast 2 + @user_broadcast_push 3 + @user_broadcast 4 + + alias Phoenix.Socket.{Message, Reply, Broadcast} + alias RealtimeWeb.Socket.UserBroadcast + + @impl true + def fastlane!(%UserBroadcast{} = msg) do + metadata = + if msg.metadata do + Phoenix.json_library().encode!(msg.metadata) + else + msg.metadata 
+ end + + topic_size = byte_size!(msg.topic, :topic, 255) + user_event_size = byte_size!(msg.user_event, :user_event, 255) + metadata_size = byte_size!(metadata, :metadata, 255) + user_payload_encoding = if msg.user_payload_encoding == :json, do: 1, else: 0 + + bin = << + @user_broadcast::size(8), + topic_size::size(8), + user_event_size::size(8), + metadata_size::size(8), + user_payload_encoding::size(8), + msg.topic::binary-size(topic_size), + msg.user_event::binary-size(user_event_size), + metadata || <<>>::binary-size(metadata_size), + msg.user_payload::binary + >> + + {:socket_push, :binary, bin} + end + + def fastlane!(%Broadcast{payload: {:binary, data}} = msg) do + topic_size = byte_size!(msg.topic, :topic, 255) + event_size = byte_size!(msg.event, :event, 255) + + bin = << + @broadcast::size(8), + topic_size::size(8), + event_size::size(8), + msg.topic::binary-size(topic_size), + msg.event::binary-size(event_size), + data::binary + >> + + {:socket_push, :binary, bin} + end + + def fastlane!(%Broadcast{payload: %{}} = msg) do + data = Phoenix.json_library().encode_to_iodata!([nil, nil, msg.topic, msg.event, msg.payload]) + {:socket_push, :text, data} + end + + def fastlane!(%Broadcast{payload: invalid}) do + raise ArgumentError, "expected broadcasted payload to be a map, got: #{inspect(invalid)}" + end + + @impl true + def encode!(%Reply{payload: {:binary, data}} = reply) do + status = to_string(reply.status) + join_ref = to_string(reply.join_ref) + ref = to_string(reply.ref) + join_ref_size = byte_size!(join_ref, :join_ref, 255) + ref_size = byte_size!(ref, :ref, 255) + topic_size = byte_size!(reply.topic, :topic, 255) + status_size = byte_size!(status, :status, 255) + + bin = << + @reply::size(8), + join_ref_size::size(8), + ref_size::size(8), + topic_size::size(8), + status_size::size(8), + join_ref::binary-size(join_ref_size), + ref::binary-size(ref_size), + reply.topic::binary-size(topic_size), + status::binary-size(status_size), + data::binary + >> + 
+ {:socket_push, :binary, bin} + end + + def encode!(%Reply{} = reply) do + data = [ + reply.join_ref, + reply.ref, + reply.topic, + "phx_reply", + %{status: reply.status, response: reply.payload} + ] + + {:socket_push, :text, Phoenix.json_library().encode_to_iodata!(data)} + end + + def encode!(%Message{payload: {:binary, data}} = msg) do + join_ref = to_string(msg.join_ref) + join_ref_size = byte_size!(join_ref, :join_ref, 255) + topic_size = byte_size!(msg.topic, :topic, 255) + event_size = byte_size!(msg.event, :event, 255) + + bin = << + @push::size(8), + join_ref_size::size(8), + topic_size::size(8), + event_size::size(8), + join_ref::binary-size(join_ref_size), + msg.topic::binary-size(topic_size), + msg.event::binary-size(event_size), + data::binary + >> + + {:socket_push, :binary, bin} + end + + def encode!(%Message{payload: %{}} = msg) do + data = [msg.join_ref, msg.ref, msg.topic, msg.event, msg.payload] + {:socket_push, :text, Phoenix.json_library().encode_to_iodata!(data)} + end + + def encode!(%Message{payload: invalid}) do + raise ArgumentError, "expected payload to be a map, got: #{inspect(invalid)}" + end + + @impl true + def decode!(raw_message, opts) do + case Keyword.fetch(opts, :opcode) do + {:ok, :text} -> decode_text(raw_message) + {:ok, :binary} -> decode_binary(raw_message) + end + end + + defp decode_text(raw_message) do + [join_ref, ref, topic, event, payload | _] = Phoenix.json_library().decode!(raw_message) + + %Message{ + topic: topic, + event: event, + payload: payload, + ref: ref, + join_ref: join_ref + } + end + + defp decode_binary(<< + @push::size(8), + join_ref_size::size(8), + ref_size::size(8), + topic_size::size(8), + event_size::size(8), + join_ref::binary-size(join_ref_size), + ref::binary-size(ref_size), + topic::binary-size(topic_size), + event::binary-size(event_size), + data::binary + >>) do + %Message{ + topic: topic, + event: event, + payload: {:binary, data}, + ref: ref, + join_ref: join_ref + } + end + + defp 
decode_binary(<< + @user_broadcast_push::size(8), + join_ref_size::size(8), + ref_size::size(8), + topic_size::size(8), + user_event_size::size(8), + metadata_size::size(8), + user_payload_encoding::size(8), + join_ref::binary-size(join_ref_size), + ref::binary-size(ref_size), + topic::binary-size(topic_size), + user_event::binary-size(user_event_size), + metadata::binary-size(metadata_size), + user_payload::binary + >>) do + user_payload_encoding = if user_payload_encoding == 0, do: :binary, else: :json + + metadata = + if metadata_size > 0 do + Phoenix.json_library().decode!(metadata) + else + %{} + end + + # Encoding as Message because that's how Phoenix Socket and Channel.Server expects things to show up + # Here we abuse the payload field to carry a tuple of (user_event, user payload encoding, user payload, metadata) + %Message{ + topic: topic, + event: "broadcast", + payload: {user_event, user_payload_encoding, user_payload, metadata}, + ref: ref, + join_ref: join_ref + } + end + + defp byte_size!(nil, _kind, _max), do: 0 + + defp byte_size!(bin, kind, max) do + case byte_size(bin) do + size when size <= max -> + size + + oversized -> + raise ArgumentError, """ + unable to convert #{kind} to binary. + + #{inspect(bin)} + + must be less than or equal to #{max} bytes, but is #{oversized} bytes. 
+ """ + end + end +end diff --git a/lib/realtime_web/tenant_broadcaster.ex b/lib/realtime_web/tenant_broadcaster.ex index ee8646614..b1b878b5d 100644 --- a/lib/realtime_web/tenant_broadcaster.ex +++ b/lib/realtime_web/tenant_broadcaster.ex @@ -5,11 +5,49 @@ defmodule RealtimeWeb.TenantBroadcaster do alias Phoenix.PubSub - @spec pubsub_broadcast(tenant_id :: String.t(), PubSub.topic(), PubSub.message(), PubSub.dispatcher()) :: :ok - def pubsub_broadcast(tenant_id, topic, message, dispatcher) do - collect_payload_size(tenant_id, message) + @type message_type :: :broadcast | :presence | :postgres_changes - Realtime.GenRpc.multicast(PubSub, :local_broadcast, [Realtime.PubSub, topic, message, dispatcher], key: topic) + @spec pubsub_direct_broadcast( + node :: node(), + tenant_id :: String.t(), + PubSub.topic(), + PubSub.message(), + PubSub.dispatcher(), + message_type + ) :: + :ok + def pubsub_direct_broadcast(node, tenant_id, topic, message, dispatcher, message_type) do + collect_payload_size(tenant_id, message, message_type) + + do_direct_broadcast(node, topic, message, dispatcher) + + :ok + end + + # Remote + defp do_direct_broadcast(node, topic, message, dispatcher) when node != node() do + if pubsub_adapter() == :gen_rpc do + PubSub.direct_broadcast(node, Realtime.PubSub, topic, message, dispatcher) + else + Realtime.GenRpc.cast(node, PubSub, :local_broadcast, [Realtime.PubSub, topic, message, dispatcher], key: topic) + end + end + + # Local + defp do_direct_broadcast(_node, topic, message, dispatcher) do + PubSub.local_broadcast(Realtime.PubSub, topic, message, dispatcher) + end + + @spec pubsub_broadcast(tenant_id :: String.t(), PubSub.topic(), PubSub.message(), PubSub.dispatcher(), message_type) :: + :ok + def pubsub_broadcast(tenant_id, topic, message, dispatcher, message_type) do + collect_payload_size(tenant_id, message, message_type) + + if pubsub_adapter() == :gen_rpc do + PubSub.broadcast(Realtime.PubSub, topic, message, dispatcher) + else + 
Realtime.GenRpc.multicast(PubSub, :local_broadcast, [Realtime.PubSub, topic, message, dispatcher], key: topic) + end :ok end @@ -19,30 +57,41 @@ defmodule RealtimeWeb.TenantBroadcaster do from :: pid, PubSub.topic(), PubSub.message(), - PubSub.dispatcher() + PubSub.dispatcher(), + message_type ) :: :ok - def pubsub_broadcast_from(tenant_id, from, topic, message, dispatcher) do - collect_payload_size(tenant_id, message) + def pubsub_broadcast_from(tenant_id, from, topic, message, dispatcher, message_type) do + collect_payload_size(tenant_id, message, message_type) - Realtime.GenRpc.multicast( - PubSub, - :local_broadcast_from, - [Realtime.PubSub, from, topic, message, dispatcher], - key: topic - ) + if pubsub_adapter() == :gen_rpc do + PubSub.broadcast_from(Realtime.PubSub, from, topic, message, dispatcher) + else + Realtime.GenRpc.multicast( + PubSub, + :local_broadcast_from, + [Realtime.PubSub, from, topic, message, dispatcher], + key: topic + ) + end :ok end @payload_size_event [:realtime, :tenants, :payload, :size] - defp collect_payload_size(tenant_id, payload) when is_struct(payload) do + @spec collect_payload_size(tenant_id :: String.t(), payload :: term, message_type :: message_type) :: :ok + def collect_payload_size(tenant_id, payload, message_type) when is_struct(payload) do # Extracting from struct so the __struct__ bit is not calculated as part of the payload - collect_payload_size(tenant_id, Map.from_struct(payload)) + collect_payload_size(tenant_id, Map.from_struct(payload), message_type) end - defp collect_payload_size(tenant_id, payload) do - :telemetry.execute(@payload_size_event, %{size: :erlang.external_size(payload)}, %{tenant: tenant_id}) + def collect_payload_size(tenant_id, payload, message_type) do + :telemetry.execute(@payload_size_event, %{size: :erlang.external_size(payload)}, %{ + tenant: tenant_id, + message_type: message_type + }) end + + defp pubsub_adapter, do: Application.fetch_env!(:realtime, :pubsub_adapter) end diff --git 
a/mix.exs b/mix.exs index d0f8a267b..9fb7c80a3 100644 --- a/mix.exs +++ b/mix.exs @@ -4,8 +4,8 @@ defmodule Realtime.MixProject do def project do [ app: :realtime, - version: "2.46.2", - elixir: "~> 1.17.3", + version: "2.70.0", + elixir: "~> 1.18", elixirc_paths: elixirc_paths(Mix.env()), start_permanent: Mix.env() == :prod, aliases: aliases(), @@ -53,7 +53,7 @@ defmodule Realtime.MixProject do # Type `mix help deps` for examples and options. defp deps do [ - {:phoenix, "~> 1.7.0"}, + {:phoenix, override: true, github: "supabase/phoenix", branch: "feat/presence-custom-dispatcher-1.7.19"}, {:phoenix_ecto, "~> 4.4.0"}, {:ecto_sql, "~> 3.11"}, {:ecto_psql_extras, "~> 0.8"}, @@ -65,7 +65,7 @@ defmodule Realtime.MixProject do {:phoenix_view, "~> 2.0"}, {:esbuild, "~> 0.4", runtime: Mix.env() == :dev}, {:tailwind, "~> 0.1", runtime: Mix.env() == :dev}, - {:telemetry_metrics, "~> 0.6"}, + {:telemetry_metrics, "~> 1.0"}, {:telemetry_poller, "~> 1.0"}, {:gettext, "~> 0.19"}, {:jason, "~> 1.3"}, @@ -73,13 +73,15 @@ defmodule Realtime.MixProject do {:libcluster, "~> 3.3"}, {:libcluster_postgres, "~> 0.2"}, {:uuid, "~> 1.1"}, - {:prom_ex, "~> 1.8"}, + {:prom_ex, "~> 1.10"}, + {:peep, git: "https://github.com/supabase/peep.git", branch: "feat/partitions-ets", override: true}, {:joken, "~> 2.5.0"}, {:ex_json_schema, "~> 0.7"}, {:recon, "~> 2.5"}, {:mint, "~> 1.4"}, {:logflare_logger_backend, "~> 0.11"}, {:syn, "~> 3.3"}, + {:beacon, path: "./beacon"}, {:cachex, "~> 4.0"}, {:open_api_spex, "~> 3.16"}, {:corsica, "~> 2.0"}, @@ -90,7 +92,7 @@ defmodule Realtime.MixProject do {:opentelemetry_phoenix, "~> 2.0"}, {:opentelemetry_cowboy, "~> 1.0"}, {:opentelemetry_ecto, "~> 1.2"}, - {:gen_rpc, git: "https://github.com/supabase/gen_rpc.git", ref: "d161cf263c661a534eaabf80aac7a34484dac772"}, + {:gen_rpc, git: "https://github.com/supabase/gen_rpc.git", ref: "5382a0f2689a4cb8838873a2173928281dbe5002"}, {:mimic, "~> 1.0", only: :test}, {:floki, ">= 0.30.0", only: :test}, {:mint_web_socket, 
"~> 1.0", only: :test}, @@ -121,7 +123,6 @@ defmodule Realtime.MixProject do test: [ "cmd epmd -daemon", "ecto.create --quiet", - "run priv/repo/seeds_before_migration.exs", "ecto.migrate --migrations-path=priv/repo/migrations", "test" ], diff --git a/mix.lock b/mix.lock index 76eb0d980..b106e9cd7 100644 --- a/mix.lock +++ b/mix.lock @@ -3,39 +3,39 @@ "benchee": {:hex, :benchee, "1.1.0", "f3a43817209a92a1fade36ef36b86e1052627fd8934a8b937ac9ab3a76c43062", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}], "hexpm", "7da57d545003165a012b587077f6ba90b89210fd88074ce3c60ce239eb5e6d93"}, "bertex": {:hex, :bertex, "1.3.0", "0ad0df9159b5110d9d2b6654f72fbf42a54884ef43b6b651e6224c0af30ba3cb", [:mix], [], "hexpm", "0a5d5e478bb5764b7b7bae37cae1ca491200e58b089df121a2fe1c223d8ee57a"}, "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, - "cachex": {:hex, :cachex, "4.0.3", "95e88c3ef4d37990948eaecccefe40b4ce4a778e0d7ade29081e6b7a89309ee2", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:ex_hash_ring, "~> 6.0", [hex: :ex_hash_ring, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "d5d632da7f162f8a190f1c39b712c0ebc9cf0007c4e2029d44eddc8041b52d55"}, - "castore": {:hex, :castore, "1.0.11", "4bbd584741601eb658007339ea730b082cc61f3554cf2e8f39bf693a11b49073", [:mix], [], "hexpm", "e03990b4db988df56262852f20de0f659871c35154691427a5047f4967a16a62"}, + "cachex": {:hex, :cachex, "4.1.1", "574c5cd28473db313a0a76aac8c945fe44191659538ca6a1e8946ec300b1a19f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: 
"hexpm", optional: false]}, {:ex_hash_ring, "~> 6.0", [hex: :ex_hash_ring, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "d6b7449ff98d6bb92dda58bd4fc3189cae9f99e7042054d669596f56dc503cd8"}, + "castore": {:hex, :castore, "1.0.15", "8aa930c890fe18b6fe0a0cff27b27d0d4d231867897bd23ea772dee561f032a3", [:mix], [], "hexpm", "96ce4c69d7d5d7a0761420ef743e2f4096253931a3ba69e5ff8ef1844fe446d3"}, "chatterbox": {:hex, :ts_chatterbox, "0.15.1", "5cac4d15dd7ad61fc3c4415ce4826fc563d4643dee897a558ec4ea0b1c835c9c", [:rebar3], [{:hpack, "~> 0.3.0", [hex: :hpack_erl, repo: "hexpm", optional: false]}], "hexpm", "4f75b91451338bc0da5f52f3480fa6ef6e3a2aeecfc33686d6b3d0a0948f31aa"}, "corsica": {:hex, :corsica, "2.1.3", "dccd094ffce38178acead9ae743180cdaffa388f35f0461ba1e8151d32e190e6", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "616c08f61a345780c2cf662ff226816f04d8868e12054e68963e95285b5be8bc"}, - "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, + "cowboy": {:hex, :cowboy, "2.14.2", "4008be1df6ade45e4f2a4e9e2d22b36d0b5aba4e20b0a0d7049e28d124e34847", [:make, :rebar3], [{:cowlib, ">= 2.16.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, ">= 1.8.0 and < 3.0.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "569081da046e7b41b5df36aa359be71a0c8874e5b9cff6f747073fc57baf1ab9"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", 
"f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, - "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, - "credo": {:hex, :credo, "1.7.11", "d3e805f7ddf6c9c854fd36f089649d7cf6ba74c42bc3795d587814e3c9847102", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "56826b4306843253a66e47ae45e98e7d284ee1f95d53d1612bb483f88a8cf219"}, + "cowlib": {:hex, :cowlib, "2.16.0", "54592074ebbbb92ee4746c8a8846e5605052f29309d3a873468d76cdf932076f", [:make, :rebar3], [], "hexpm", "7f478d80d66b747344f0ea7708c187645cfcc08b11aa424632f78e25bf05db51"}, + "credo": {:hex, :credo, "1.7.13", "126a0697df6b7b71cd18c81bc92335297839a806b6f62b61d417500d1070ff4e", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "47641e6d2bbff1e241e87695b29f617f1a8f912adea34296fb10ecc3d7e9e84f"}, "ctx": {:hex, :ctx, "0.6.0", "8ff88b70e6400c4df90142e7f130625b82086077a45364a78d208ed3ed53c7fe", [:rebar3], [], "hexpm", "a14ed2d1b67723dbebbe423b28d7615eb0bdcba6ff28f2d1f1b0a7e1d4aa5fc2"}, - "db_connection": {:hex, :db_connection, "2.8.0", "64fd82cfa6d8e25ec6660cea73e92a4cbc6a18b31343910427b702838c4b33b2", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"008399dae5eee1bf5caa6e86d204dcb44242c82b1ed5e22c881f2c34da201b15"}, + "db_connection": {:hex, :db_connection, "2.8.1", "9abdc1e68c34c6163f6fb96a96532272d13ad7ca45262156ae8b7ec6d9dc4bec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a61a3d489b239d76f326e03b98794fb8e45168396c925ef25feb405ed09da8fd"}, "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "dialyxir": {:hex, :dialyxir, "1.4.5", "ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, - "ecto": {:hex, :ecto, "3.13.2", "7d0c0863f3fc8d71d17fc3ad3b9424beae13f02712ad84191a826c7169484f01", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "669d9291370513ff56e7b7e7081b7af3283d02e046cf3d403053c557894a0b3e"}, + "dialyxir": {:hex, :dialyxir, "1.4.6", "7cca478334bf8307e968664343cbdb432ee95b4b68a9cba95bdabb0ad5bdfd9a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "8cf5615c5cd4c2da6c501faae642839c8405b49f8aa057ad4ae401cb808ef64d"}, + "ecto": {:hex, :ecto, "3.13.3", "6a983f0917f8bdc7a89e96f2bf013f220503a0da5d8623224ba987515b3f0d80", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, 
repo: "hexpm", optional: false]}], "hexpm", "1927db768f53a88843ff25b6ba7946599a8ca8a055f69ad8058a1432a399af94"}, "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.8.8", "aa02529c97f69aed5722899f5dc6360128735a92dd169f23c5d50b1f7fdede08", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "> 0.16.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "04c63d92b141723ad6fed2e60a4b461ca00b3594d16df47bbc48f1f4534f2c49"}, "ecto_sql": {:hex, :ecto_sql, "3.13.2", "a07d2461d84107b3d037097c822ffdd36ed69d1cf7c0f70e12a3d1decf04e2e1", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "539274ab0ecf1a0078a6a72ef3465629e4d6018a3028095dc90f60a19c371717"}, "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, - "esbuild": {:hex, :esbuild, "0.8.2", "5f379dfa383ef482b738e7771daf238b2d1cfb0222bef9d3b20d4c8f06c7a7ac", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "558a8a08ed78eb820efbfda1de196569d8bfa9b51e8371a1934fbb31345feda7"}, + "esbuild": {:hex, :esbuild, "0.10.0", "b0aa3388a1c23e727c5a3e7427c932d89ee791746b0081bbe56103e9ef3d291f", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", 
"468489cda427b974a7cc9f03ace55368a83e1a7be12fba7e30969af78e5f8c70"}, "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"}, "ex_hash_ring": {:hex, :ex_hash_ring, "6.0.4", "bef9d2d796afbbe25ab5b5a7ed746e06b99c76604f558113c273466d52fa6d6b", [:mix], [], "hexpm", "89adabf31f7d3dfaa36802ce598ce918e9b5b33bae8909ac1a4d052e1e567d18"}, - "ex_json_schema": {:hex, :ex_json_schema, "0.10.2", "7c4b8c1481fdeb1741e2ce66223976edfb9bccebc8014f6aec35d4efe964fb71", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "37f43be60f8407659d4d0155a7e45e7f406dab1f827051d3d35858a709baf6a6"}, - "excoveralls": {:hex, :excoveralls, "0.18.3", "bca47a24d69a3179951f51f1db6d3ed63bca9017f476fe520eb78602d45f7756", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "746f404fcd09d5029f1b211739afb8fb8575d775b21f6a3908e7ce3e640724c6"}, + "ex_json_schema": {:hex, :ex_json_schema, "0.11.1", "b593f92937a095f66054bb318681397dfe7304e7d2b6b1a7534ea3aa40024f8c", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "32d651a575a6ce2fd613f140b0fef8dd0acc7cf8e8bcd29a3a1be5c945700dd5"}, + "excoveralls": {:hex, :excoveralls, "0.18.5", "e229d0a65982613332ec30f07940038fe451a2e5b29bce2a5022165f0c9b157e", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "523fe8a15603f86d64852aab2abe8ddbd78e68579c8525ae765facc5eae01562"}, "expo": {:hex, :expo, "1.1.0", "f7b9ed7fb5745ebe1eeedf3d6f29226c5dd52897ac67c0f8af62a07e661e5c75", [:mix], [], "hexpm", "fbadf93f4700fb44c331362177bdca9eeb8097e8b0ef525c9cc501cb9917c960"}, - "file_system": {:hex, :file_system, "1.1.0", 
"08d232062284546c6c34426997dd7ef6ec9f8bbd090eb91780283c9016840e8f", [:mix], [], "hexpm", "bfcf81244f416871f2a2e15c1b515287faa5db9c6bcf290222206d120b3d43f6"}, - "finch": {:hex, :finch, "0.19.0", "c644641491ea854fc5c1bbaef36bfc764e3f08e7185e1f084e35e0672241b76d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fc5324ce209125d1e2fa0fcd2634601c52a787aff1cd33ee833664a5af4ea2b6"}, - "floki": {:hex, :floki, "0.37.0", "b83e0280bbc6372f2a403b2848013650b16640cd2470aea6701f0632223d719e", [:mix], [], "hexpm", "516a0c15a69f78c47dc8e0b9b3724b29608aa6619379f91b1ffa47109b5d0dd3"}, - "gen_rpc": {:git, "https://github.com/supabase/gen_rpc.git", "d161cf263c661a534eaabf80aac7a34484dac772", [ref: "d161cf263c661a534eaabf80aac7a34484dac772"]}, + "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"}, + "finch": {:hex, :finch, "0.20.0", "5330aefb6b010f424dcbbc4615d914e9e3deae40095e73ab0c1bb0968933cadf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2658131a74d051aabfcba936093c903b8e89da9a1b63e430bee62045fa9b2ee2"}, + "floki": {:hex, :floki, "0.38.0", 
"62b642386fa3f2f90713f6e231da0fa3256e41ef1089f83b6ceac7a3fd3abf33", [:mix], [], "hexpm", "a5943ee91e93fb2d635b612caf5508e36d37548e84928463ef9dd986f0d1abd9"}, + "gen_rpc": {:git, "https://github.com/supabase/gen_rpc.git", "5382a0f2689a4cb8838873a2173928281dbe5002", [ref: "5382a0f2689a4cb8838873a2173928281dbe5002"]}, "gettext": {:hex, :gettext, "0.26.2", "5978aa7b21fada6deabf1f6341ddba50bc69c999e812211903b169799208f2a8", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "aa978504bcf76511efdc22d580ba08e2279caab1066b76bb9aa81c4a1e0a32a5"}, "gproc": {:hex, :gproc, "0.9.1", "f1df0364423539cf0b80e8201c8b1839e229e5f9b3ccb944c5834626998f5b8c", [:rebar3], [], "hexpm", "905088e32e72127ed9466f0bac0d8e65704ca5e73ee5a62cb073c3117916d507"}, "grpcbox": {:hex, :grpcbox, "0.17.1", "6e040ab3ef16fe699ffb513b0ef8e2e896da7b18931a1ef817143037c454bcce", [:rebar3], [{:acceptor_pool, "~> 1.0.0", [hex: :acceptor_pool, repo: "hexpm", optional: false]}, {:chatterbox, "~> 0.15.1", [hex: :ts_chatterbox, repo: "hexpm", optional: false]}, {:ctx, "~> 0.6.0", [hex: :ctx, repo: "hexpm", optional: false]}, {:gproc, "~> 0.9.1", [hex: :gproc, repo: "hexpm", optional: false]}], "hexpm", "4a3b5d7111daabc569dc9cbd9b202a3237d81c80bf97212fbc676832cb0ceb17"}, - "ham": {:hex, :ham, "0.3.0", "7cd031b4a55fba219c11553e7b13ba73bd86eab4034518445eff1e038cb9a44d", [:mix], [], "hexpm", "7d6c6b73d7a6a83233876cc1b06a4d9b5de05562b228effda4532f9a49852bf6"}, + "ham": {:hex, :ham, "0.3.2", "02ae195f49970ef667faf9d01bc454fb80909a83d6c775bcac724ca567aeb7b3", [:mix], [], "hexpm", "b71cc684c0e5a3d32b5f94b186770551509e93a9ae44ca1c1a313700f2f6a69a"}, "hpack": {:hex, :hpack_erl, "0.3.0", "2461899cc4ab6a0ef8e970c1661c5fc6a52d3c25580bc6dd204f84ce94669926", [:rebar3], [], "hexpm", "d6137d7079169d8c485c6962dfe261af5b9ef60fbc557344511c1e65e3d95fb0"}, - "hpax": {:hex, :hpax, "1.0.2", "762df951b0c399ff67cc57c3995ec3cf46d696e41f0bba17da0518d94acd4aac", [:mix], [], "hexpm", 
"2f09b4c1074e0abd846747329eaa26d535be0eb3d189fa69d812bfb8bfefd32f"}, + "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"}, "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, "joken": {:hex, :joken, "2.5.0", "09be497d804b8115eb6f07615cef2e60c2a1008fb89dc0aef0d4c4b4609b99aa", [:mix], [{:jose, "~> 1.11.2", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "22b25c89617c5ed8ca7b31026340a25ea0f9ca7160f9706b79be9ed81fdf74e7"}, "jose": {:hex, :jose, "1.11.10", "a903f5227417bd2a08c8a00a0cbcc458118be84480955e8d251297a425723f83", [:mix, :rebar3], [], "hexpm", "0d6cd36ff8ba174db29148fc112b5842186b68a90ce9fc2b3ec3afe76593e614"}, @@ -45,65 +45,66 @@ "logflare_api_client": {:hex, :logflare_api_client, "0.3.5", "c427ebf65a8402d68b056d4a5ef3e1eb3b90c0ad1d0de97d1fe23807e0c1b113", [:mix], [{:bertex, "~> 1.3", [hex: :bertex, repo: "hexpm", optional: false]}, {:finch, "~> 0.10", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:tesla, "~> 1.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "16d29abcb80c4f72745cdf943379da02a201504813c3aa12b4d4acb0302b7723"}, "logflare_etso": {:hex, :logflare_etso, "1.1.2", "040bd3e482aaf0ed20080743b7562242ec5079fd88a6f9c8ce5d8298818292e9", [:mix], [{:ecto, "~> 3.8", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "ab96be42900730a49b132891f43a9be1d52e4ad3ee9ed9cb92565c5f87345117"}, "logflare_logger_backend": {:hex, :logflare_logger_backend, "0.11.4", "3a5df94e764b7c8ee4bd7b875a480a34a27807128d8459aa59ea63b2b38bddc7", [:mix], [{:bertex, "~> 1.3", [hex: :bertex, repo: "hexpm", optional: false]}, 
{:logflare_api_client, "~> 0.3.5", [hex: :logflare_api_client, repo: "hexpm", optional: false]}, {:logflare_etso, "~> 1.1.2", [hex: :logflare_etso, repo: "hexpm", optional: false]}, {:typed_struct, "~> 0.3.0", [hex: :typed_struct, repo: "hexpm", optional: false]}], "hexpm", "00998d81b3c481ad93d2bf25e66d1ddb1a01ad77d994e2c1a7638c6da94755c5"}, - "mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"}, + "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, "mimic": {:hex, :mimic, "1.12.0", "34c9d1fb8e756df09ca5f96861d273f2bb01063df1a6a51a4c101f9ad7f07a9c", [:mix], [{:ham, "~> 0.2", [hex: :ham, repo: "hexpm", optional: false]}], "hexpm", "eaa43d495d6f3bc8099b28886e05a1b09a2a6be083f6385c3abc17599e5e2c43"}, - "mint": {:hex, :mint, "1.6.2", "af6d97a4051eee4f05b5500671d47c3a67dac7386045d87a904126fd4bbcea2e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "5ee441dffc1892f1ae59127f74afe8fd82fda6587794278d924e4d90ea3d63f9"}, + "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"}, "mint_web_socket": {:hex, :mint_web_socket, "1.0.4", "0b539116dbb3d3f861cdf5e15e269a933cb501c113a14db7001a3157d96ffafd", [:mix], [{:mint, ">= 1.4.1 and < 2.0.0-0", [hex: :mint, repo: "hexpm", optional: false]}], "hexpm", "027d4c5529c45a4ba0ce27a01c0f35f284a5468519c045ca15f43decb360a991"}, - 
"mix_audit": {:hex, :mix_audit, "2.1.4", "0a23d5b07350cdd69001c13882a4f5fb9f90fbd4cbf2ebc190a2ee0d187ea3e9", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "fd807653cc8c1cada2911129c7eb9e985e3cc76ebf26f4dd628bb25bbcaa7099"}, + "mix_audit": {:hex, :mix_audit, "2.1.5", "c0f77cee6b4ef9d97e37772359a187a166c7a1e0e08b50edf5bf6959dfe5a016", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "87f9298e21da32f697af535475860dc1d3617a010e0b418d2ec6142bc8b42d69"}, "mix_test_watch": {:hex, :mix_test_watch, "1.3.0", "2ffc9f72b0d1f4ecf0ce97b044e0e3c607c3b4dc21d6228365e8bc7c2856dc77", [:mix], [{:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "f9e5edca976857ffac78632e635750d158df14ee2d6185a15013844af7570ffe"}, "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, - "observer_cli": {:hex, :observer_cli, "1.8.1", "edfe0c0f983631961599326f239f6e99750aba7387515002b1284dcfe7fcd6d2", [:mix, :rebar3], [{:recon, "~> 2.5.6", [hex: :recon, repo: "hexpm", optional: false]}], "hexpm", "a3cd6300dd8290ade93d688fbd79c872e393b01256309dd7a653feb13c434fb4"}, + "observer_cli": {:hex, :observer_cli, "1.8.4", "09030c04d2480499037ba33d801c6e02adba4e7244a05e05b984b5a82843be71", [:mix, :rebar3], [{:recon, "~> 2.5.6", [hex: :recon, repo: "hexpm", optional: false]}], "hexpm", "0fcd71ac723bcd2d91266d99b3c3ccd9465c71c9f392d900cea8effdc1a1485c"}, "octo_fetch": {:hex, 
:octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"}, - "open_api_spex": {:hex, :open_api_spex, "3.21.2", "6a704f3777761feeb5657340250d6d7332c545755116ca98f33d4b875777e1e5", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0 or ~> 6.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "f42ae6ed668b895ebba3e02773cfb4b41050df26f803f2ef634c72a7687dc387"}, - "opentelemetry": {:hex, :opentelemetry, "1.5.0", "7dda6551edfc3050ea4b0b40c0d2570423d6372b97e9c60793263ef62c53c3c2", [:rebar3], [{:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "cdf4f51d17b592fc592b9a75f86a6f808c23044ba7cf7b9534debbcc5c23b0ee"}, - "opentelemetry_api": {:hex, :opentelemetry_api, "1.4.0", "63ca1742f92f00059298f478048dfb826f4b20d49534493d6919a0db39b6db04", [:mix, :rebar3], [], "hexpm", "3dfbbfaa2c2ed3121c5c483162836c4f9027def469c41578af5ef32589fcfc58"}, + "open_api_spex": {:hex, :open_api_spex, "3.22.0", "fbf90dc82681dc042a4ee79853c8e989efbba73d9e87439085daf849bbf8bc20", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0 or ~> 6.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", 
optional: true]}], "hexpm", "dd751ddbdd709bb4a5313e9a24530da6e66594773c7242a0c2592cbd9f589063"}, + "opentelemetry": {:hex, :opentelemetry, "1.6.0", "0954dbe12f490ee7b126c9e924cf60141b1238a02dfc700907eadde4dcc20460", [:rebar3], [{:opentelemetry_api, "~> 1.4.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "5fd0123d65d2649f10e478e7444927cd9fbdffcaeb8c1c2fcae3d486d18c5e62"}, + "opentelemetry_api": {:hex, :opentelemetry_api, "1.4.1", "e071429a37441a0fe9097eeea0ff921ebadce8eba8e1ce297b05a43c7a0d121f", [:mix, :rebar3], [], "hexpm", "39bdb6ad740bc13b16215cb9f233d66796bbae897f3bf6eb77abb712e87c3c26"}, "opentelemetry_cowboy": {:hex, :opentelemetry_cowboy, "1.0.0", "786c7cde66a2493323c79d2c94e679ff501d459a9b403d8b60b9bef116333117", [:rebar3], [{:cowboy_telemetry, "~> 0.4", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:opentelemetry_semantic_conventions, "~> 1.27", [hex: :opentelemetry_semantic_conventions, repo: "hexpm", optional: false]}, {:opentelemetry_telemetry, "~> 1.1", [hex: :opentelemetry_telemetry, repo: "hexpm", optional: false]}, {:otel_http, "~> 0.2", [hex: :otel_http, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7575716eaccacd0eddc3e7e61403aecb5d0a6397183987d6049094aeb0b87a7c"}, "opentelemetry_ecto": {:hex, :opentelemetry_ecto, "1.2.0", "2382cb47ddc231f953d3b8263ed029d87fbf217915a1da82f49159d122b64865", [:mix], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "70dfa2e79932e86f209df00e36c980b17a32f82d175f0068bf7ef9a96cf080cf"}, - "opentelemetry_exporter": {:hex, :opentelemetry_exporter, "1.8.0", 
"5d546123230771ef4174e37bedfd77e3374913304cd6ea3ca82a2add49cd5d56", [:rebar3], [{:grpcbox, ">= 0.0.0", [hex: :grpcbox, repo: "hexpm", optional: false]}, {:opentelemetry, "~> 1.5.0", [hex: :opentelemetry, repo: "hexpm", optional: false]}, {:opentelemetry_api, "~> 1.4.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:tls_certificate_check, "~> 1.18", [hex: :tls_certificate_check, repo: "hexpm", optional: false]}], "hexpm", "a1f9f271f8d3b02b81462a6bfef7075fd8457fdb06adff5d2537df5e2264d9af"}, + "opentelemetry_exporter": {:hex, :opentelemetry_exporter, "1.9.0", "e344bf5e3dab2815fe381b0cac172c06cfc29ecf792c5d74cbbd2b3184af359c", [:rebar3], [{:grpcbox, ">= 0.0.0", [hex: :grpcbox, repo: "hexpm", optional: false]}, {:opentelemetry, "~> 1.6.0", [hex: :opentelemetry, repo: "hexpm", optional: false]}, {:opentelemetry_api, "~> 1.4.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:tls_certificate_check, "~> 1.18", [hex: :tls_certificate_check, repo: "hexpm", optional: false]}], "hexpm", "2030a59e33afff6aaeba847d865c8db5dc3873db87a9257df2ca03cafd9e0478"}, "opentelemetry_phoenix": {:hex, :opentelemetry_phoenix, "2.0.1", "c664cdef205738cffcd409b33599439a4ffb2035ef6e21a77927ac1da90463cb", [:mix], [{:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: false]}, {:opentelemetry_semantic_conventions, "~> 1.27", [hex: :opentelemetry_semantic_conventions, repo: "hexpm", optional: false]}, {:opentelemetry_telemetry, "~> 1.1", [hex: :opentelemetry_telemetry, repo: "hexpm", optional: false]}, {:otel_http, "~> 0.2", [hex: :otel_http, repo: "hexpm", optional: false]}, {:plug, ">= 1.11.0", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"a24fdccdfa6b890c8892c6366beab4a15a27ec0c692b0f77ec2a862e7b235f6e"}, "opentelemetry_process_propagator": {:hex, :opentelemetry_process_propagator, "0.3.0", "ef5b2059403a1e2b2d2c65914e6962e56371570b8c3ab5323d7a8d3444fb7f84", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "7243cb6de1523c473cba5b1aefa3f85e1ff8cc75d08f367104c1e11919c8c029"}, "opentelemetry_semantic_conventions": {:hex, :opentelemetry_semantic_conventions, "1.27.0", "acd0194a94a1e57d63da982ee9f4a9f88834ae0b31b0bd850815fe9be4bbb45f", [:mix, :rebar3], [], "hexpm", "9681ccaa24fd3d810b4461581717661fd85ff7019b082c2dff89c7d5b1fc2864"}, "opentelemetry_telemetry": {:hex, :opentelemetry_telemetry, "1.1.2", "410ab4d76b0921f42dbccbe5a7c831b8125282850be649ee1f70050d3961118a", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.3", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "641ab469deb181957ac6d59bce6e1321d5fe2a56df444fc9c19afcad623ab253"}, "otel_http": {:hex, :otel_http, "0.2.0", "b17385986c7f1b862f5d577f72614ecaa29de40392b7618869999326b9a61d8a", [:rebar3], [], "hexpm", "f2beadf922c8cfeb0965488dd736c95cc6ea8b9efce89466b3904d317d7cc717"}, - "phoenix": {:hex, :phoenix, "1.7.19", "36617efe5afbd821099a8b994ff4618a340a5bfb25531a1802c4d4c634017a57", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", 
optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "ba4dc14458278773f905f8ae6c2ec743d52c3a35b6b353733f64f02dfe096cd6"}, + "peep": {:git, "https://github.com/supabase/peep.git", "3ba8f8f77f4c8dae734f9d8f603c24c1046502da", [branch: "feat/partitions-ets"]}, + "phoenix": {:git, "https://github.com/supabase/phoenix.git", "7b884cc0cc1a49ad2bc272acda2e622b3e11c139", [branch: "feat/presence-custom-dispatcher-1.7.19"]}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.4.3", "86e9878f833829c3f66da03d75254c155d91d72a201eb56ae83482328dc7ca93", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d36c401206f3011fefd63d04e8ef626ec8791975d9d107f9a0817d426f61ac07"}, "phoenix_html": {:hex, :phoenix_html, "3.3.4", "42a09fc443bbc1da37e372a5c8e6755d046f22b9b11343bf885067357da21cb3", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0249d3abec3714aff3415e7ee3d9786cb325be3151e6c4b3021502c585bf53fb"}, - "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.6", "7b1f0327f54c9eb69845fd09a77accf922f488c549a7e7b8618775eb603a62c7", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", 
[hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "1681ab813ec26ca6915beb3414aa138f298e17721dc6a2bde9e6eb8a62360ff6"}, - "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.5.3", "f2161c207fda0e4fb55165f650f7f8db23f02b29e3bff00ff7ef161d6ac1f09d", [:mix], [{:file_system, "~> 0.3 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "b4ec9cd73cb01ff1bd1cac92e045d13e7030330b74164297d1aee3907b54803c"}, + "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.7", "405880012cb4b706f26dd1c6349125bfc903fb9e44d1ea668adaf4e04d4884b7", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "3a8625cab39ec261d48a13b7468dc619c0ede099601b084e343968309bd4d7d7"}, + "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.1", "05df733a09887a005ed0d69a7fc619d376aea2730bf64ce52ac51ce716cc1ef0", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "74273843d5a6e4fef0bbc17599f33e3ec63f08e69215623a0cd91eea4288e5a0"}, "phoenix_live_view": {:hex, :phoenix_live_view, "0.20.17", "f396bbdaf4ba227b82251eb75ac0afa6b3da5e509bc0d030206374237dfc9450", [:mix], [{:floki, "~> 0.36", [hex: :floki, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: 
:jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a61d741ffb78c85fdbca0de084da6a48f8ceb5261a79165b5a0b59e5f65ce98b"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, "phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"}, - "plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"}, - 
"plug_cowboy": {:hex, :plug_cowboy, "2.7.2", "fdadb973799ae691bf9ecad99125b16625b1c6039999da5fe544d99218e662e4", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "245d8a11ee2306094840c000e8816f0cbed69a23fc0ac2bcf8d7835ae019bb2f"}, - "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"}, + "plug": {:hex, :plug, "1.18.1", "5067f26f7745b7e31bc3368bc1a2b818b9779faa959b49c934c17730efc911cf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "57a57db70df2b422b564437d2d33cf8d33cd16339c1edb190cd11b1a3a546cc2"}, + "plug_cowboy": {:hex, :plug_cowboy, "2.7.4", "729c752d17cf364e2b8da5bdb34fb5804f56251e88bb602aff48ae0bd8673d11", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "9b85632bd7012615bae0a5d70084deb1b25d2bcbb32cab82d1e9a1e023168aa3"}, + "plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"}, "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"}, - "postgres_replication": {:git, 
"https://github.com/filipecabaco/postgres_replication.git", "69129221f0263aa13faa5fbb8af97c28aeb4f71c", []}, + "postgres_replication": {:git, "https://github.com/filipecabaco/postgres_replication.git", "3b0700ee38a1dddaf7936c5793d6f35431fee2cd", []}, "postgrex": {:hex, :postgrex, "0.20.0", "363ed03ab4757f6bc47942eff7720640795eb557e1935951c1626f0d303a3aed", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "d36ef8b36f323d29505314f704e21a1a038e2dc387c6409ee0cd24144e187c0f"}, - "prom_ex": {:hex, :prom_ex, "1.9.0", "63e6dda6c05cdeec1f26c48443dcc38ffd2118b3665ae8d2bd0e5b79f2aea03e", [:mix], [{:absinthe, ">= 1.6.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.0.2", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.5.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.15", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.4.0", [hex: :oban, repo: "hexpm", optional: true]}, {:octo_fetch, "~> 0.3", [hex: :octo_fetch, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.14.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.12.1", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.5", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry, ">= 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.0", [hex: 
:telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "01f3d4f69ec93068219e686cc65e58a29c42bea5429a8ff4e2121f19db178ee6"}, - "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, + "prom_ex": {:hex, :prom_ex, "1.11.0", "1f6d67f2dead92224cb4f59beb3e4d319257c5728d9638b4a5e8ceb51a4f9c7e", [:mix], [{:absinthe, ">= 1.7.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.1.0", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.11.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.10.0", [hex: :oban, repo: "hexpm", optional: true]}, {:octo_fetch, "~> 0.4", [hex: :octo_fetch, repo: "hexpm", optional: false]}, {:peep, "~> 3.0", [hex: :peep, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.7.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.20.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.16.0", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 2.6.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, ">= 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.2", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.1", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "76b074bc3730f0802978a7eb5c7091a65473eaaf07e99ec9e933138dcc327805"}, + "ranch": {:hex, :ranch, "2.2.0", "25528f82bc8d7c6152c57666ca99ec716510fe0925cb188172f41ce93117b1b0", [:make, :rebar3], [], "hexpm", "fa0b99a1780c80218a4197a59ea8d3bdae32fbff7e88527d7d8a4787eff4f8e7"}, "recon": 
{:hex, :recon, "2.5.6", "9052588e83bfedfd9b72e1034532aee2a5369d9d9343b61aeb7fbce761010741", [:mix, :rebar3], [], "hexpm", "96c6799792d735cc0f0fd0f86267e9d351e63339cbe03df9d162010cefc26bb0"}, - "req": {:hex, :req, "0.5.10", "a3a063eab8b7510785a467f03d30a8d95f66f5c3d9495be3474b61459c54376c", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "8a604815743f8a2d3b5de0659fa3137fa4b1cffd636ecb69b30b2b9b2c2559be"}, + "req": {:hex, :req, "0.5.15", "662020efb6ea60b9f0e0fac9be88cd7558b53fe51155a2d9899de594f9906ba9", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "a6513a35fad65467893ced9785457e91693352c70b58bbc045b47e5eb2ef0c53"}, "sleeplocks": {:hex, :sleeplocks, "1.1.3", "96a86460cc33b435c7310dbd27ec82ca2c1f24ae38e34f8edde97f756503441a", [:rebar3], [], "hexpm", "d3b3958552e6eb16f463921e70ae7c767519ef8f5be46d7696cc1ed649421321"}, "snabbkaffe": {:git, "https://github.com/kafka4beam/snabbkaffe", "b59298334ed349556f63405d1353184c63c66534", [tag: "1.0.10"]}, - "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, 
"~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, + "sobelow": {:hex, :sobelow, "0.14.1", "2f81e8632f15574cba2402bcddff5497b413c01e6f094bc0ab94e83c2f74db81", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8fac9a2bd90fdc4b15d6fca6e1608efb7f7c600fa75800813b794ee9364c87f2"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, - "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, + "statistex": {:hex, :statistex, "1.1.0", "7fec1eb2f580a0d2c1a05ed27396a084ab064a40cfc84246dbfb0c72a5c761e5", [:mix], [], "hexpm", "f5950ea26ad43246ba2cce54324ac394a4e7408fdcf98b8e230f503a0cba9cf5"}, "syn": {:hex, :syn, "3.3.0", "4684a909efdfea35ce75a9662fc523e4a8a4e8169a3df275e4de4fa63f99c486", [:rebar3], [], "hexpm", "e58ee447bc1094bdd21bf0acc102b1fbf99541a508cd48060bf783c245eaf7d6"}, "table_rex": {:hex, :table_rex, "4.1.0", "fbaa8b1ce154c9772012bf445bfb86b587430fb96f3b12022d3f35ee4a68c918", [:mix], [], "hexpm", "95932701df195d43bc2d1c6531178fc8338aa8f38c80f098504d529c43bc2601"}, - "tailwind": {:hex, :tailwind, "0.2.4", "5706ec47182d4e7045901302bf3a333e80f3d1af65c442ba9a9eed152fb26c2e", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}], "hexpm", "c6e4a82b8727bab593700c998a4d98cf3d8025678bfde059aed71d0000c3e463"}, + "tailwind": {:hex, :tailwind, "0.4.1", "e7bcc222fe96a1e55f948e76d13dd84a1a7653fb051d2a167135db3b4b08d3e9", [:mix], [], "hexpm", "6249d4f9819052911120dbdbe9e532e6bd64ea23476056adb7f730aa25c220d1"}, "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", 
[:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, - "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.2", "2caabe9344ec17eafe5403304771c3539f3b6e2f7fb6a6f602558c825d0d0bfb", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9b43db0dc33863930b9ef9d27137e78974756f5f198cae18409970ed6fa5b561"}, + "telemetry_metrics": {:hex, :telemetry_metrics, "1.1.0", "5bd5f3b5637e0abea0426b947e3ce5dd304f8b3bc6617039e2b5a008adc02f8f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e7b79e8ddfde70adb6db8a6623d1778ec66401f366e9a8f5dd0955c56bc8ce67"}, "telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.2.1", "c9755987d7b959b557084e6990990cb96a50d6482c683fb9622a63837f3cd3d8", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "5e2c599da4983c4f88a33e9571f1458bf98b0cf6ba930f1dc3a6e8cf45d5afb6"}, - "telemetry_poller": {:hex, :telemetry_poller, "1.1.0", "58fa7c216257291caaf8d05678c8d01bd45f4bdbc1286838a28c4bb62ef32999", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9eb9d9cbfd81cbd7cdd24682f8711b6e2b691289a0de6826e58452f28c103c8f"}, - "tesla": {:hex, :tesla, "1.13.2", "85afa342eb2ac0fee830cf649dbd19179b6b359bec4710d02a3d5d587f016910", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", 
optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:mox, "~> 1.0", [hex: :mox, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "960609848f1ef654c3cdfad68453cd84a5febecb6ed9fed9416e36cd9cd724f9"}, - "tls_certificate_check": {:hex, :tls_certificate_check, "1.28.0", "c39bf21f67c2d124ae905454fad00f27e625917e8ab1009146e916e1df6ab275", [:rebar3], [{:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "3ab058c3f9457fffca916729587415f0ddc822048a0e5b5e2694918556d92df1"}, + "telemetry_poller": {:hex, :telemetry_poller, "1.3.0", "d5c46420126b5ac2d72bc6580fb4f537d35e851cc0f8dbd571acf6d6e10f5ec7", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "51f18bed7128544a50f75897db9974436ea9bfba560420b646af27a9a9b35211"}, + "tesla": {:hex, :tesla, "1.15.3", "3a2b5c37f09629b8dcf5d028fbafc9143c0099753559d7fe567eaabfbd9b8663", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.21", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, 
{:mox, "~> 1.0", [hex: :mox, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "98bb3d4558abc67b92fb7be4cd31bb57ca8d80792de26870d362974b58caeda7"}, + "tls_certificate_check": {:hex, :tls_certificate_check, "1.29.0", "4473005eb0bbdad215d7083a230e2e076f538d9ea472c8009fd22006a4cfc5f6", [:rebar3], [{:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "5b0d0e5cb0f928bc4f210df667304ed91c5bff2a391ce6bdedfbfe70a8f096c5"}, "typed_struct": {:hex, :typed_struct, "0.3.0", "939789e3c1dca39d7170c87f729127469d1315dcf99fee8e152bb774b17e7ff7", [:mix], [], "hexpm", "c50bd5c3a61fe4e198a8504f939be3d3c85903b382bde4865579bc23111d1b6d"}, "unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"}, "uuid": {:hex, :uuid, "1.1.8", "e22fc04499de0de3ed1116b770c7737779f226ceefa0badb3592e64d5cfb4eb9", [:mix], [], "hexpm", "c790593b4c3b601f5dc2378baae7efaf5b3d73c4c6456ba85759905be792f2ac"}, "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"}, "websock_adapter": {:hex, :websock_adapter, "0.5.8", "3b97dc94e407e2d1fc666b2fb9acf6be81a1798a2602294aac000260a7c4a47d", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "315b9a1865552212b5f35140ad194e67ce31af45bcee443d4ecb96b5fd3f3782"}, "yamerl": {:hex, :yamerl, "0.10.0", 
"4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"}, - "yaml_elixir": {:hex, :yaml_elixir, "2.11.0", "9e9ccd134e861c66b84825a3542a1c22ba33f338d82c07282f4f1f52d847bd50", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "53cc28357ee7eb952344995787f4bb8cc3cecbf189652236e9b163e8ce1bc242"}, + "yaml_elixir": {:hex, :yaml_elixir, "2.12.0", "30343ff5018637a64b1b7de1ed2a3ca03bc641410c1f311a4dbdc1ffbbf449c7", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "ca6bacae7bac917a7155dca0ab6149088aa7bc800c94d0fe18c5238f53b313c6"}, } diff --git a/priv/repo/dev_seeds.exs b/priv/repo/dev_seeds.exs index 7dec7895a..7767edec0 100644 --- a/priv/repo/dev_seeds.exs +++ b/priv/repo/dev_seeds.exs @@ -1,5 +1,3 @@ -import Ecto.Adapters.SQL, only: [query!: 3] - alias Realtime.Api.Tenant alias Realtime.Database alias Realtime.Repo @@ -41,19 +39,6 @@ default_db_host = "127.0.0.1" }) |> Repo.insert!() - publication = "supabase_realtime" - - [ - "drop publication if exists #{publication}", - "drop table if exists public.test_tenant;", - "create table public.test_tenant ( id SERIAL PRIMARY KEY, details text );", - "grant all on table public.test_tenant to anon;", - "grant all on table public.test_tenant to postgres;", - "grant all on table public.test_tenant to authenticated;", - "create publication #{publication} for table public.test_tenant" - ] - |> Enum.each(&query!(Repo, &1, [])) - tenant end) @@ -61,10 +46,22 @@ default_db_host = "127.0.0.1" settings = Database.from_tenant(tenant, "realtime_migrations", :stop) settings = %{settings | max_restarts: 0, ssl: false} {:ok, tenant_conn} = Database.connect_db(settings) +publication = "supabase_realtime" Postgrex.transaction(tenant_conn, fn db_conn -> Postgrex.query!(db_conn, "DROP SCHEMA IF EXISTS realtime CASCADE", []) Postgrex.query!(db_conn, "CREATE SCHEMA IF 
NOT EXISTS realtime", []) + + [ + "drop publication if exists #{publication}", + "drop table if exists public.test_tenant;", + "create table public.test_tenant ( id SERIAL PRIMARY KEY, details text );", + "grant all on table public.test_tenant to anon;", + "grant all on table public.test_tenant to postgres;", + "grant all on table public.test_tenant to authenticated;", + "create publication #{publication} for table public.test_tenant" + ] + |> Enum.each(&Postgrex.query!(db_conn, &1)) end) case Tenants.Migrations.run_migrations(tenant) do diff --git a/priv/repo/migrations/20250926223044_set_default_presence_value.exs b/priv/repo/migrations/20250926223044_set_default_presence_value.exs new file mode 100644 index 000000000..5f1833a34 --- /dev/null +++ b/priv/repo/migrations/20250926223044_set_default_presence_value.exs @@ -0,0 +1,10 @@ +defmodule Realtime.Repo.Migrations.SetDefaultPresenceValue do + use Ecto.Migration + @disable_ddl_transaction true + @disable_migration_lock true + def change do + alter table(:tenants) do + modify :max_presence_events_per_second, :integer, default: 1000 + end + end +end diff --git a/priv/repo/migrations/20251204170944_nullable_jwt_secrets.exs b/priv/repo/migrations/20251204170944_nullable_jwt_secrets.exs new file mode 100644 index 000000000..342a80ad9 --- /dev/null +++ b/priv/repo/migrations/20251204170944_nullable_jwt_secrets.exs @@ -0,0 +1,13 @@ +defmodule Realtime.Repo.Migrations.NullableJwtSecrets do + use Ecto.Migration + + def change do + alter table(:tenants) do + modify :jwt_secret, :text, null: true + end + + create constraint(:tenants, :jwt_secret_or_jwt_jwks_required, + check: "jwt_secret IS NOT NULL OR jwt_jwks IS NOT NULL" + ) + end +end diff --git a/priv/repo/migrations/20251218000543_ensure_jwt_secret_is_text.exs b/priv/repo/migrations/20251218000543_ensure_jwt_secret_is_text.exs new file mode 100644 index 000000000..008c9d7db --- /dev/null +++ b/priv/repo/migrations/20251218000543_ensure_jwt_secret_is_text.exs @@ -0,0 
+1,9 @@ +defmodule Realtime.Repo.Migrations.EnsureJwtSecretIsText do + use Ecto.Migration + + def change do + alter table(:tenants) do + modify :jwt_secret, :text, null: true + end + end +end diff --git a/rel/vm.args.eex b/rel/vm.args.eex index 278da5524..983e240c4 100644 --- a/rel/vm.args.eex +++ b/rel/vm.args.eex @@ -10,8 +10,8 @@ ## Tweak GC to run more often ##-env ERL_FULLSWEEP_AFTER 10 -## Limit process heap for all procs to 1000 MB -+hmax 1000000000 +## Limit process heap for all procs to 2500 MB. The number here is the number of words ++hmax <%= div(2_500_000_000, :erlang.system_info(:wordsize)) %> ## Set distribution buffer busy limit (default is 1024) +zdbbl 100000 @@ -19,4 +19,4 @@ ## Disable Busy Wait +sbwt none +sbwtdio none -+sbwtdcpu none \ No newline at end of file ++sbwtdcpu none diff --git a/run.sh b/run.sh index 2dddbc1b8..66585dc2b 100755 --- a/run.sh +++ b/run.sh @@ -90,7 +90,7 @@ if [ "${ENABLE_ERL_CRASH_DUMP:-false}" = true ]; then trap upload_crash_dump_to_s3 INT TERM KILL EXIT fi -if [[ -n "${GENERATE_CLUSTER_CERTS}" ]] ; then +if [[ -n "${GENERATE_CLUSTER_CERTS:-}" ]] ; then generate_certs fi diff --git a/test/e2e/tests.ts b/test/e2e/tests.ts index 2711a959e..4193b06c2 100644 --- a/test/e2e/tests.ts +++ b/test/e2e/tests.ts @@ -1,8 +1,5 @@ import { load } from "https://deno.land/std@0.224.0/dotenv/mod.ts"; -import { - createClient, - SupabaseClient, -} from "npm:@supabase/supabase-js@2.49.5-next.5"; +import { createClient, SupabaseClient } from "npm:@supabase/supabase-js@latest"; import { assertEquals } from "https://deno.land/std@0.224.0/assert/mod.ts"; import { describe, @@ -69,11 +66,7 @@ describe("broadcast extension", () => { while (activeChannel.state == "joining") await sleep(0.2); // Send from unsubscribed channel - supabase.channel(topic, config).send({ - type: "broadcast", - event, - payload: expectedPayload, - }); + supabase.channel(topic, config).httpSend(event, expectedPayload); while (result == null) await sleep(0.2); diff 
--git a/test/integration/distributed_realtime_channel_test.exs b/test/integration/distributed_realtime_channel_test.exs new file mode 100644 index 000000000..54411d414 --- /dev/null +++ b/test/integration/distributed_realtime_channel_test.exs @@ -0,0 +1,61 @@ +defmodule Realtime.Integration.DistributedRealtimeChannelTest do + # Use of Clustered + use RealtimeWeb.ConnCase, + async: false, + parameterize: [%{serializer: Phoenix.Socket.V1.JSONSerializer}, %{serializer: RealtimeWeb.Socket.V2Serializer}] + + alias Phoenix.Socket.Message + + alias Realtime.Tenants.Connect + alias Realtime.Integration.WebsocketClient + + setup do + tenant = Realtime.Api.get_tenant_by_external_id("dev_tenant") + + RateCounterHelper.stop(tenant.external_id) + + Connect.shutdown(tenant.external_id) + # Sleeping so that syn can forget about this Connect process + Process.sleep(100) + + on_exit(fn -> + Connect.shutdown(tenant.external_id) + # Sleeping so that syn can forget about this Connect process + Process.sleep(100) + end) + + on_exit(fn -> Connect.shutdown(tenant.external_id) end) + {:ok, node} = Clustered.start() + region = Realtime.Tenants.region(tenant) + {:ok, db_conn} = :erpc.call(node, Connect, :connect, ["dev_tenant", region]) + assert Connect.ready?(tenant.external_id) + + assert node(db_conn) == node + %{tenant: tenant, topic: random_string()} + end + + describe "distributed broadcast" do + @tag mode: :distributed + test "it works", %{tenant: tenant, topic: topic, serializer: serializer} do + {:ok, token} = + generate_token(tenant, %{exp: System.system_time(:second) + 1000, role: "authenticated", sub: random_string()}) + + {:ok, remote_socket} = + WebsocketClient.connect(self(), uri(tenant, serializer, 4012), serializer, [{"x-api-key", token}]) + + {:ok, socket} = WebsocketClient.connect(self(), uri(tenant, serializer), serializer, [{"x-api-key", token}]) + + config = %{broadcast: %{self: false}, private: false} + topic = "realtime:#{topic}" + + :ok = 
WebsocketClient.join(remote_socket, topic, %{config: config}) + :ok = WebsocketClient.join(socket, topic, %{config: config}) + + # Send through one socket and receive through the other (self: false) + payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} + :ok = WebsocketClient.send_event(remote_socket, topic, "broadcast", payload) + + assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 2000 + end + end +end diff --git a/test/integration/measure_traffic_test.exs b/test/integration/measure_traffic_test.exs new file mode 100644 index 000000000..5a560536a --- /dev/null +++ b/test/integration/measure_traffic_test.exs @@ -0,0 +1,233 @@ +defmodule Realtime.Integration.MeasureTrafficTest do + use RealtimeWeb.ConnCase, async: false + + alias Phoenix.Socket.Message + alias Realtime.Integration.WebsocketClient + + setup do + tenant = Containers.checkout_tenant(run_migrations: true) + + {:ok, db_conn} = Realtime.Tenants.Connect.lookup_or_start_connection(tenant.external_id) + assert Realtime.Tenants.Connect.ready?(tenant.external_id) + %{db_conn: db_conn, tenant: tenant} + end + + def handle_telemetry(event, measurements, metadata, name) do + tenant = metadata[:tenant] + [key] = Enum.take(event, -1) + value = Map.get(measurements, :sum) || Map.get(measurements, :value) || Map.get(measurements, :size) || 0 + + Agent.update(name, fn state -> + state = + Map.put_new( + state, + tenant, + %{ + joins: 0, + events: 0, + db_events: 0, + presence_events: 0, + output_bytes: 0, + input_bytes: 0 + } + ) + + update_in(state, [metadata[:tenant], key], fn v -> (v || 0) + value end) + end) + end + + defp get_count(event, tenant) do + [key] = Enum.take(event, -1) + + :"TestCounter_#{tenant}" + |> Agent.get(fn state -> get_in(state, [tenant, key]) || 0 end) + end + + describe "measure traffic" do + setup %{tenant: tenant} do + events = [ + [:realtime, :channel, :output_bytes], + [:realtime, :channel, :input_bytes] + ] + + name = 
:"TestCounter_#{tenant.external_id}" + + {:ok, _} = + start_supervised(%{ + id: 1, + start: {Agent, :start_link, [fn -> %{} end, [name: name]]} + }) + + RateCounterHelper.stop(tenant.external_id) + on_exit(fn -> :telemetry.detach({__MODULE__, tenant.external_id}) end) + :telemetry.attach_many({__MODULE__, tenant.external_id}, events, &__MODULE__.handle_telemetry/4, name) + + measure_traffic_interval_in_ms = Application.get_env(:realtime, :measure_traffic_interval_in_ms) + Application.put_env(:realtime, :measure_traffic_interval_in_ms, 10) + on_exit(fn -> Application.put_env(:realtime, :measure_traffic_interval_in_ms, measure_traffic_interval_in_ms) end) + + :ok + end + + test "measure traffic for broadcast events", %{tenant: tenant} do + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}} + topic = "realtime:any" + + WebsocketClient.join(socket, topic, %{config: config}) + + # Wait for join to complete + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 1000 + assert_receive %Message{topic: ^topic, event: "presence_state"}, 1000 + + for _ <- 1..5 do + WebsocketClient.send_event(socket, topic, "broadcast", %{ + "event" => "TEST", + "payload" => %{"msg" => 1}, + "type" => "broadcast" + }) + + assert_receive %Message{ + event: "broadcast", + payload: %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"}, + topic: ^topic + }, + 500 + end + + # Wait for RateCounter to run + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) + Process.sleep(100) + + output_bytes = get_count([:realtime, :channel, :output_bytes], tenant.external_id) + input_bytes = get_count([:realtime, :channel, :input_bytes], tenant.external_id) + + assert output_bytes > 0 + assert input_bytes > 0 + end + + test "measure traffic for presence events", %{tenant: tenant} do + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}, presence: %{enabled: true}} + topic = "realtime:any" + + 
WebsocketClient.join(socket, topic, %{config: config}) + + # Wait for join to complete + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 1000 + assert_receive %Message{topic: ^topic, event: "presence_state"}, 1000 + + for _ <- 1..5 do + WebsocketClient.send_event(socket, topic, "presence", %{ + "event" => "TRACK", + "payload" => %{name: "realtime_presence_#{:rand.uniform(1000)}", t: 1814.7000000029802}, + "type" => "presence" + }) + end + + # Wait for RateCounter to run + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) + Process.sleep(100) + + output_bytes = get_count([:realtime, :channel, :output_bytes], tenant.external_id) + input_bytes = get_count([:realtime, :channel, :input_bytes], tenant.external_id) + + assert output_bytes > 0, "Expected output_bytes to be greater than 0, got #{output_bytes}" + assert input_bytes > 0, "Expected input_bytes to be greater than 0, got #{input_bytes}" + end + + test "measure traffic for postgres changes events", %{tenant: tenant, db_conn: db_conn} do + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}, postgres_changes: [%{event: "*", schema: "public"}]} + topic = "realtime:any" + + WebsocketClient.join(socket, topic, %{config: config}) + + # Wait for join to complete + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 1000 + assert_receive %Message{topic: ^topic, event: "presence_state"}, 1000 + + # Wait for postgres_changes subscription to be ready + assert_receive %Message{ + event: "system", + payload: %{ + "channel" => "any", + "extension" => "postgres_changes", + "status" => "ok" + }, + topic: ^topic + }, + 8000 + + for _ <- 1..5 do + Postgrex.query!(db_conn, "INSERT INTO test (details) VALUES ($1)", [random_string()]) + end + + for _ <- 1..5 do + assert_receive %Message{ + event: "postgres_changes", + payload: %{"data" => %{"schema" => "public", "table" => "test", "type" => "INSERT"}}, + topic: 
^topic + }, + 500 + end + + # Wait for RateCounter to run + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) + Process.sleep(100) + + output_bytes = get_count([:realtime, :channel, :output_bytes], tenant.external_id) + input_bytes = get_count([:realtime, :channel, :input_bytes], tenant.external_id) + + assert output_bytes > 0, "Expected output_bytes to be greater than 0, got #{output_bytes}" + assert input_bytes > 0, "Expected input_bytes to be greater than 0, got #{input_bytes}" + end + + test "measure traffic for db events", %{tenant: tenant, db_conn: db_conn} do + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}, db: %{enabled: true}} + topic = "realtime:any" + channel_name = "any" + + WebsocketClient.join(socket, topic, %{config: config}) + + # Wait for join to complete + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 1000 + assert_receive %Message{topic: ^topic, event: "presence_state"}, 1000 + + for _ <- 1..5 do + event = random_string() + value = random_string() + + Postgrex.query!( + db_conn, + "SELECT realtime.send (json_build_object ('value', $1 :: text)::jsonb, $2 :: text, $3 :: text, FALSE::bool);", + [value, event, channel_name] + ) + + assert_receive %Message{ + event: "broadcast", + payload: %{ + "event" => ^event, + "payload" => %{"value" => ^value}, + "type" => "broadcast" + }, + topic: ^topic, + join_ref: nil, + ref: nil + }, + 1000 + end + + # Wait for RateCounter to run + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) + Process.sleep(100) + + output_bytes = get_count([:realtime, :channel, :output_bytes], tenant.external_id) + input_bytes = get_count([:realtime, :channel, :input_bytes], tenant.external_id) + + assert output_bytes > 0, "Expected output_bytes to be greater than 0, got #{output_bytes}" + assert input_bytes > 0, "Expected input_bytes to be greater than 0, got #{input_bytes}" + end + end +end diff --git 
a/test/integration/region_aware_migrations_test.exs b/test/integration/region_aware_migrations_test.exs new file mode 100644 index 000000000..892ed2382 --- /dev/null +++ b/test/integration/region_aware_migrations_test.exs @@ -0,0 +1,70 @@ +defmodule Realtime.Integration.RegionAwareMigrationsTest do + use Realtime.DataCase, async: false + use Mimic + + alias Containers + alias Realtime.Tenants + alias Realtime.Tenants.Migrations + + setup do + {:ok, port} = Containers.checkout() + + settings = [ + %{ + "type" => "postgres_cdc_rls", + "settings" => %{ + "db_host" => "127.0.0.1", + "db_name" => "postgres", + "db_user" => "supabase_admin", + "db_password" => "postgres", + "db_port" => "#{port}", + "poll_interval" => 100, + "poll_max_changes" => 100, + "poll_max_record_bytes" => 1_048_576, + "region" => "ap-southeast-2", + "publication" => "supabase_realtime_test", + "ssl_enforced" => false + } + } + ] + + tenant = tenant_fixture(%{extensions: settings}) + region = Application.get_env(:realtime, :region) + + {:ok, node} = + Clustered.start(nil, + extra_config: [ + {:realtime, :region, Tenants.region(tenant)}, + {:realtime, :master_region, region} + ] + ) + + Process.sleep(100) + + %{tenant: tenant, node: node} + end + + test "run_migrations routes to node in tenant's region with expected arguments", %{tenant: tenant, node: node} do + assert tenant.migrations_ran == 0 + + Realtime.GenRpc + |> Mimic.expect(:call, fn called_node, mod, func, args, opts -> + assert called_node == node + assert mod == Migrations + assert func == :start_migration + assert opts[:tenant_id] == tenant.external_id + + arg = hd(args) + assert arg.tenant_external_id == tenant.external_id + assert arg.migrations_ran == tenant.migrations_ran + assert arg.settings == hd(tenant.extensions).settings + + call_original(Realtime.GenRpc, :call, [node, mod, func, args, opts]) + end) + + assert :ok = Migrations.run_migrations(tenant) + Process.sleep(1000) + tenant = Realtime.Repo.reload!(tenant) + refute 
tenant.migrations_ran == 0 + end +end diff --git a/test/integration/region_aware_routing_test.exs b/test/integration/region_aware_routing_test.exs new file mode 100644 index 000000000..f9f5178f2 --- /dev/null +++ b/test/integration/region_aware_routing_test.exs @@ -0,0 +1,232 @@ +defmodule Realtime.Integration.RegionAwareRoutingTest do + use Realtime.DataCase, async: false + use Mimic + + alias Realtime.Api + alias Realtime.Api.Tenant + alias Realtime.GenRpc + alias Realtime.Nodes + + setup do + # Configure test runner as non-master region (eu-west-1) with master_region = us-east-1 + original_master_region = Application.get_env(:realtime, :master_region) + + Application.put_env(:realtime, :master_region, "eu-west-2") + + # Start peer node as master region (us-east-1) + # The master node will automatically register itself in RegionNodes on startup + {:ok, master_node} = + Clustered.start(nil, + extra_config: [ + {:realtime, :region, "eu-west-2"}, + {:realtime, :master_region, "eu-west-2"} + ] + ) + + Process.sleep(100) + + on_exit(fn -> + Application.put_env(:realtime, :master_region, original_master_region) + Clustered.stop() + end) + + %{master_node: master_node} + end + + test "create_tenant automatically routes to master region", %{master_node: master_node} do + external_id = "test_routing_#{System.unique_integer([:positive])}" + + attrs = %{ + "external_id" => external_id, + "name" => external_id, + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100 + } + + Mimic.expect(Realtime.GenRpc, :call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :create_tenant + assert opts[:tenant_id] == external_id + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + + result = Api.create_tenant(attrs) + + assert {:ok, %Tenant{} = tenant} = result + assert 
tenant.external_id == external_id + + assert Realtime.Repo.get_by(Tenant, external_id: external_id) + end + + test "update_tenant automatically routes to master region", %{master_node: master_node} do + # Create tenant on master node first + tenant_attrs = %{ + "external_id" => "test_update_#{System.unique_integer([:positive])}", + "name" => "original", + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100 + } + + Realtime.GenRpc + |> Mimic.expect(:call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :create_tenant + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + |> Mimic.expect(:call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :update_tenant_by_external_id + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + + tenant = tenant_fixture(tenant_attrs) + + new_name = "updated_via_routing" + result = Api.update_tenant_by_external_id(tenant.external_id, %{name: new_name}) + + assert {:ok, %Tenant{} = updated} = result + assert updated.name == new_name + + reloaded = Realtime.Repo.get(Tenant, tenant.id) + assert reloaded.name == new_name + end + + test "delete_tenant_by_external_id automatically routes to master region", %{master_node: master_node} do + # Create tenant on master node first + tenant_attrs = %{ + "external_id" => "test_delete_#{System.unique_integer([:positive])}", + "name" => "to_delete", + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100 + } + + Realtime.GenRpc + |> Mimic.expect(:call, fn node, mod, 
func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :create_tenant + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + |> Mimic.expect(:call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :delete_tenant_by_external_id + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + + tenant = tenant_fixture(tenant_attrs) + + result = Api.delete_tenant_by_external_id(tenant.external_id) + + assert result == true + + refute Realtime.Repo.get(Tenant, tenant.id) + end + + test "update_migrations_ran automatically routes to master region", %{master_node: master_node} do + # Create tenant on master node first + tenant_attrs = %{ + "external_id" => "test_migrations_#{System.unique_integer([:positive])}", + "name" => "migrations_test", + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100, + "migrations_ran" => 0 + } + + Realtime.GenRpc + |> Mimic.expect(:call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :create_tenant + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + |> Mimic.expect(:call, fn node, mod, func, args, opts -> + assert node == master_node + assert mod == Realtime.Api + assert func == :update_migrations_ran + assert opts[:tenant_id] == tenant_attrs["external_id"] + + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end) + + tenant = tenant_fixture(tenant_attrs) + + new_migrations_ran = 5 + result = Api.update_migrations_ran(tenant.external_id, new_migrations_ran) + + assert {:ok, updated} = result + assert 
updated.migrations_ran == new_migrations_ran + + reloaded = Realtime.Repo.get(Tenant, tenant.id) + assert reloaded.migrations_ran == new_migrations_ran + end + + test "returns error when Nodes.node_from_region returns {:error, :not_available}" do + external_id = "test_error_node_unavailable_#{System.unique_integer([:positive])}" + + attrs = %{ + "external_id" => external_id, + "name" => external_id, + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100 + } + + Mimic.expect(Nodes, :node_from_region, fn _region, _key -> {:error, :not_available} end) + result = Api.create_tenant(attrs) + assert {:error, :not_available} = result + end + + test "returns error when GenRpc.call returns {:error, :rpc_error, reason}" do + external_id = "test_error_rpc_error_#{System.unique_integer([:positive])}" + rpc_error_reason = :timeout + + attrs = %{ + "external_id" => external_id, + "name" => external_id, + "jwt_secret" => "secret", + "public_key" => "public", + "extensions" => [], + "postgres_cdc_default" => "postgres_cdc_rls", + "max_concurrent_users" => 200, + "max_events_per_second" => 100 + } + + Mimic.expect(GenRpc, :call, fn _node, _mod, _func, _args, _opts -> {:error, :rpc_error, rpc_error_reason} end) + result = Api.create_tenant(attrs) + assert {:error, ^rpc_error_reason} = result + end +end diff --git a/test/integration/rt_channel_test.exs b/test/integration/rt_channel_test.exs index 806a5ad7e..c4160e4e4 100644 --- a/test/integration/rt_channel_test.exs +++ b/test/integration/rt_channel_test.exs @@ -1,83 +1,36 @@ defmodule Realtime.Integration.RtChannelTest do - # async: false due to the fact that multiple operations against the same tenant and usage of mocks - # Also using dev_tenant due to distributed test - alias Realtime.Api - use RealtimeWeb.ConnCase, async: false - use Mimic + use RealtimeWeb.ConnCase, + async: true, + 
parameterize: [%{serializer: Phoenix.Socket.V1.JSONSerializer}, %{serializer: RealtimeWeb.Socket.V2Serializer}] + import ExUnit.CaptureLog import Generators - setup :set_mimic_global - require Logger alias Extensions.PostgresCdcRls - alias Phoenix.Socket.Message - alias Phoenix.Socket.V1 - alias Postgrex - alias Realtime.Api.Tenant alias Realtime.Database alias Realtime.Integration.WebsocketClient - alias Realtime.RateCounter alias Realtime.Tenants - alias Realtime.Tenants.Authorization alias Realtime.Tenants.Connect - - alias RealtimeWeb.RealtimeChannel.Tracker + alias Realtime.Tenants.ReplicationConnection alias RealtimeWeb.SocketDisconnect @moduletag :capture_log - @port 4003 - @serializer V1.JSONSerializer - - Application.put_env(:phoenix, TestEndpoint, - https: false, - http: [port: @port], - debug_errors: false, - server: true, - pubsub_server: __MODULE__, - secret_key_base: String.duplicate("a", 64) - ) - - setup_all do - capture_log(fn -> start_supervised!(TestEndpoint) end) - start_supervised!({Phoenix.PubSub, name: __MODULE__}) - :ok - end - setup [:mode] - - describe "postgres changes" do - setup %{tenant: tenant} do - {:ok, conn} = Database.connect(tenant, "realtime_test") - - Database.transaction(conn, fn db_conn -> - queries = [ - "drop table if exists public.test", - "drop publication if exists supabase_realtime_test", - "create sequence if not exists test_id_seq;", - """ - create table if not exists "public"."test" ( - "id" int4 not null default nextval('test_id_seq'::regclass), - "details" text, - primary key ("id")); - """, - "grant all on table public.test to anon;", - "grant all on table public.test to postgres;", - "grant all on table public.test to authenticated;", - "create publication supabase_realtime_test for all tables" - ] - - Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) - end) + setup do + tenant = Containers.checkout_tenant(run_migrations: true) - :ok - end + {:ok, db_conn} = 
Connect.lookup_or_start_connection(tenant.external_id) + assert Connect.ready?(tenant.external_id) + %{db_conn: db_conn, tenant: tenant} + end - test "error subscribing", %{tenant: tenant} do + describe "postgres changes" do + test "error subscribing", %{tenant: tenant, serializer: serializer} do {:ok, conn} = Database.connect(tenant, "realtime_test") # Let's drop the publication to cause an error @@ -85,7 +38,7 @@ defmodule Realtime.Integration.RtChannelTest do Postgrex.query!(db_conn, "drop publication if exists supabase_realtime_test") end) - {socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [%{event: "INSERT", schema: "public"}]} @@ -99,7 +52,7 @@ defmodule Realtime.Integration.RtChannelTest do "channel" => "any", "extension" => "postgres_changes", "message" => - "{:error, \"Unable to subscribe to changes with given parameters. Please check Realtime is enabled for the given connect parameters: [event: INSERT, schema: public]\"}", + "Unable to subscribe to changes with given parameters. 
Please check Realtime is enabled for the given connect parameters: [schema: public, table: *, filters: []]", "status" => "error" }, ref: nil, @@ -112,8 +65,8 @@ defmodule Realtime.Integration.RtChannelTest do assert log =~ "Unable to subscribe to changes with given parameters" end - test "handle insert", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "handle insert", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [%{event: "INSERT", schema: "public"}]} @@ -175,8 +128,8 @@ defmodule Realtime.Integration.RtChannelTest do 500 end - test "handle update", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "handle update", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [%{event: "UPDATE", schema: "public"}]} @@ -242,8 +195,8 @@ defmodule Realtime.Integration.RtChannelTest do 500 end - test "handle delete", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "handle delete", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [%{event: "DELETE", schema: "public"}]} @@ -307,8 +260,8 @@ defmodule Realtime.Integration.RtChannelTest do 500 end - test "handle wildcard", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "handle wildcard", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [%{event: "*", schema: "public"}]} @@ -420,8 +373,8 @@ defmodule Realtime.Integration.RtChannelTest do 500 end - test "handle nil postgres changes params as empty param changes", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "handle nil postgres changes params as empty param changes", %{tenant: tenant, serializer: 
serializer} do + {socket, _} = get_connection(tenant, serializer) topic = "realtime:any" config = %{postgres_changes: [nil]} @@ -448,8 +401,8 @@ defmodule Realtime.Integration.RtChannelTest do describe "handle broadcast extension" do setup [:rls_context] - test "public broadcast", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "public broadcast", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}, private: false} topic = "realtime:any" WebsocketClient.join(socket, topic, %{config: config}) @@ -463,15 +416,17 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 500 end - test "broadcast to another tenant does not get mixed up", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "broadcast to another tenant does not get mixed up", %{tenant: tenant, serializer: serializer} do + other_tenant = Containers.checkout_tenant(run_migrations: true) + + Realtime.Tenants.Cache.update_cache(other_tenant) + + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: false}, private: false} topic = "realtime:any" WebsocketClient.join(socket, topic, %{config: config}) - other_tenant = Containers.checkout_tenant(run_migrations: true) - - {other_socket, _} = get_connection(other_tenant) + {other_socket, _} = get_connection(other_tenant, serializer) WebsocketClient.join(other_socket, topic, %{config: config}) # Both sockets joined @@ -488,8 +443,12 @@ defmodule Realtime.Integration.RtChannelTest do end @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] - test "private broadcast with valid channel with permissions sends message", %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + test "private broadcast with valid channel with permissions sends message", %{ + tenant: tenant, + 
topic: topic, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: config}) @@ -503,61 +462,12 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic} end - @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence], - mode: :distributed - test "private broadcast with valid channel with permissions sends message using a remote node (phoenix adapter)", %{ - tenant: tenant, - topic: topic - } do - {:ok, token} = - generate_token(tenant, %{exp: System.system_time(:second) + 1000, role: "authenticated", sub: random_string()}) - - {:ok, remote_socket} = WebsocketClient.connect(self(), uri(tenant, 4012), @serializer, [{"x-api-key", token}]) - {:ok, socket} = WebsocketClient.connect(self(), uri(tenant), @serializer, [{"x-api-key", token}]) - - config = %{broadcast: %{self: false}, private: true} - topic = "realtime:#{topic}" - - WebsocketClient.join(remote_socket, topic, %{config: config}) - WebsocketClient.join(socket, topic, %{config: config}) - - # Send through one socket and receive through the other (self: false) - payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} - WebsocketClient.send_event(socket, topic, "broadcast", payload) - - assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 500 - end - - @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence], - mode: :distributed - test "private broadcast with valid channel with permissions sends message using a remote node", %{ - tenant: tenant, - topic: topic - } do - {:ok, token} = - generate_token(tenant, %{exp: System.system_time(:second) + 1000, role: "authenticated", sub: random_string()}) - - {:ok, remote_socket} = 
WebsocketClient.connect(self(), uri(tenant, 4012), @serializer, [{"x-api-key", token}]) - {:ok, socket} = WebsocketClient.connect(self(), uri(tenant), @serializer, [{"x-api-key", token}]) - - config = %{broadcast: %{self: false}, private: true} - topic = "realtime:#{topic}" - - WebsocketClient.join(remote_socket, topic, %{config: config}) - WebsocketClient.join(socket, topic, %{config: config}) - - # Send through one socket and receive through the other (self: false) - payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} - WebsocketClient.send_event(socket, topic, "broadcast", payload) - assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 500 - end - @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence], topic: "topic" test "private broadcast with valid channel a colon character sends message and won't intercept in public channels", - %{topic: topic, tenant: tenant} do - {anon_socket, _} = get_connection(tenant, "anon") - {socket, _} = get_connection(tenant, "authenticated") + %{topic: topic, tenant: tenant, serializer: serializer} do + {anon_socket, _} = get_connection(tenant, serializer, role: "anon") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") valid_topic = "realtime:#{topic}" malicious_topic = "realtime:private:#{topic}" @@ -579,17 +489,18 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence] test "private broadcast with valid channel no write permissions won't send message but will receive message", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" - {service_role_socket, _} = get_connection(tenant, "service_role") + {service_role_socket, _} = get_connection(tenant, serializer, role: "service_role") WebsocketClient.join(service_role_socket, topic, %{config: 
config}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: config}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} @@ -605,12 +516,16 @@ defmodule Realtime.Integration.RtChannelTest do end @tag policies: [] - test "private broadcast with valid channel and no read permissions won't join", %{tenant: tenant, topic: topic} do + test "private broadcast with valid channel and no read permissions won't join", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do config = %{private: true} expected = "Unauthorized: You do not have permissions to read from this Channel topic: #{topic}" topic = "realtime:#{topic}" - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") log = capture_log(fn -> @@ -636,14 +551,18 @@ defmodule Realtime.Integration.RtChannelTest do end @tag policies: [:authenticated_read_broadcast_and_presence] - test "handles lack of connection to database error on private channels", %{tenant: tenant, topic: topic} do + test "handles lack of connection to database error on private channels", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do topic = "realtime:#{topic}" - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: %{broadcast: %{self: true}, private: true}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} - {service_role_socket, _} = get_connection(tenant, "service_role") + 
{service_role_socket, _} = get_connection(tenant, serializer, role: "service_role") WebsocketClient.join(service_role_socket, topic, %{config: %{broadcast: %{self: false}, private: true}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} @@ -653,22 +572,26 @@ defmodule Realtime.Integration.RtChannelTest do :syn.update_registry(Connect, tenant.external_id, fn _pid, meta -> %{meta | conn: nil} end) payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} WebsocketClient.send_event(service_role_socket, topic, "broadcast", payload) - # Waiting more than 5 seconds as this is the amount of time we will wait for the Connection to be ready - refute_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 6000 + # Waiting more than 15 seconds as this is the amount of time we will wait for the Connection to be ready + refute_receive %Message{event: "broadcast", payload: ^payload, topic: ^topic}, 16000 end) assert log =~ "UnableToHandleBroadcast" end @tag policies: [] - test "lack of connection to database error does not impact public channels", %{tenant: tenant, topic: topic} do + test "lack of connection to database error does not impact public channels", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do topic = "realtime:#{topic}" - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: %{broadcast: %{self: true}, private: false}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} - {service_role_socket, _} = get_connection(tenant, "service_role") + {service_role_socket, _} = get_connection(tenant, serializer, role: "service_role") WebsocketClient.join(service_role_socket, topic, %{config: %{broadcast: %{self: 
false}, private: false}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} @@ -688,8 +611,8 @@ defmodule Realtime.Integration.RtChannelTest do describe "handle presence extension" do setup [:rls_context] - test "public presence", %{tenant: tenant} do - {socket, _} = get_connection(tenant) + test "public presence", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer) config = %{presence: %{key: "", enabled: true}, private: false} topic = "realtime:any" @@ -715,8 +638,8 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "private presence with read and write permissions will be able to track and receive presence changes", - %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{presence: %{key: "", enabled: true}, private: true} topic = "realtime:#{topic}" @@ -740,8 +663,8 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence], mode: :distributed test "private presence with read and write permissions will be able to track and receive presence changes using a remote node", - %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{presence: %{key: "", enabled: true}, private: true} topic = "realtime:#{topic}" @@ -764,9 +687,9 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence] test "private presence with read 
permissions will be able to receive presence changes but won't be able to track", - %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") - {secondary_socket, _} = get_connection(tenant, "service_role") + %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") + {secondary_socket, _} = get_connection(tenant, serializer, role: "service_role") config = fn key -> %{presence: %{key: key, enabled: true}, private: true} end topic = "realtime:#{topic}" @@ -816,9 +739,13 @@ defmodule Realtime.Integration.RtChannelTest do end @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] - test "handles lack of connection to database error on private channels", %{tenant: tenant, topic: topic} do + test "handles lack of connection to database error on private channels", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do topic = "realtime:#{topic}" - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: %{private: true, presence: %{enabled: true}}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} @@ -831,16 +758,20 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{event: "presence_diff"}, 500 # Waiting more than 5 seconds as this is the amount of time we will wait for the Connection to be ready - refute_receive %Message{event: "phx_leave", topic: ^topic}, 6000 + refute_receive %Message{event: "phx_leave", topic: ^topic}, 16000 end) - assert log =~ "UnableToHandlePresence" + assert log =~ ~r/external_id=#{tenant.external_id}.*UnableToHandlePresence/ end @tag policies: [] - test "lack of connection to database error does not impact public channels", %{tenant: tenant, 
topic: topic} do + test "lack of connection to database error does not impact public channels", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do topic = "realtime:#{topic}" - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: %{private: false, presence: %{enabled: true}}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{event: "presence_state"} @@ -855,16 +786,17 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{event: "phx_leave", topic: ^topic} end) - refute log =~ "UnableToHandlePresence" + refute log =~ ~r/external_id=#{tenant.external_id}.*UnableToHandlePresence/ end @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "presence enabled if param enabled is set in configuration for private channels", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: %{private: true, presence: %{enabled: true}}}) @@ -876,9 +808,10 @@ defmodule Realtime.Integration.RtChannelTest do test "presence disabled if param 'enabled' is set to false in configuration for private channels", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: %{private: true, presence: %{enabled: false}}}) @@ -888,9 +821,10 @@ defmodule Realtime.Integration.RtChannelTest do test "presence enabled if param enabled is set in configuration for public channels", %{ 
tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: %{private: false, presence: %{enabled: true}}}) @@ -900,15 +834,69 @@ defmodule Realtime.Integration.RtChannelTest do test "presence disabled if param 'enabled' is set to false in configuration for public channels", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: %{private: false, presence: %{enabled: false}}}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 refute_receive %Message{event: "presence_state"}, 500 end + + test "presence automatically enabled when user sends track message for public channel", %{ + tenant: tenant, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer) + config = %{presence: %{key: "", enabled: false}, private: false} + topic = "realtime:any" + + WebsocketClient.join(socket, topic, %{config: config}) + + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 + refute_receive %Message{event: "presence_state"}, 500 + + payload = %{ + type: "presence", + event: "TRACK", + payload: %{name: "realtime_presence_96", t: 1814.7000000029802} + } + + WebsocketClient.send_event(socket, topic, "presence", payload) + + assert_receive %Message{event: "presence_diff", payload: %{"joins" => joins, "leaves" => %{}}, topic: ^topic} + + join_payload = joins |> Map.values() |> hd() |> get_in(["metas"]) |> hd() + assert get_in(join_payload, ["name"]) == payload.payload.name + assert get_in(join_payload, ["t"]) == payload.payload.t + end + + @tag policies: 
[:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] + test "presence automatically enabled when user sends track message for private channel", + %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") + config = %{presence: %{key: "", enabled: false}, private: true} + topic = "realtime:#{topic}" + + WebsocketClient.join(socket, topic, %{config: config}) + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 + refute_receive %Message{event: "presence_state"}, 500 + + payload = %{ + type: "presence", + event: "TRACK", + payload: %{name: "realtime_presence_96", t: 1814.7000000029802} + } + + WebsocketClient.send_event(socket, topic, "presence", payload) + + assert_receive %Message{event: "presence_diff", payload: %{"joins" => joins, "leaves" => %{}}, topic: ^topic}, 500 + join_payload = joins |> Map.values() |> hd() |> get_in(["metas"]) |> hd() + assert get_in(join_payload, ["name"]) == payload.payload.name + assert get_in(join_payload, ["t"]) == payload.payload.t + end end describe "token handling" do @@ -918,32 +906,40 @@ defmodule Realtime.Integration.RtChannelTest do :authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence ] - test "badly formatted jwt token", %{tenant: tenant} do + test "badly formatted jwt token", %{tenant: tenant, serializer: serializer} do log = capture_log(fn -> - WebsocketClient.connect(self(), uri(tenant), @serializer, [{"x-api-key", "bad_token"}]) + WebsocketClient.connect(self(), uri(tenant, serializer), serializer, [{"x-api-key", "bad_token"}]) end) assert log =~ "MalformedJWT: The token provided is not a valid JWT" end - test "invalid JWT with expired token", %{tenant: tenant} do + test "invalid JWT with expired token", %{tenant: tenant, serializer: serializer} do log = capture_log(fn -> - get_connection(tenant, "authenticated", %{:exp => 
System.system_time(:second) - 1000}, %{log_level: :info}) + get_connection(tenant, serializer, + role: "authenticated", + claims: %{:exp => System.system_time(:second) - 1000}, + params: %{log_level: :info} + ) end) assert log =~ "InvalidJWTToken: Token has expired" end - test "token required the role key", %{tenant: tenant} do + test "token required the role key", %{tenant: tenant, serializer: serializer} do {:ok, token} = token_no_role(tenant) assert {:error, %{status_code: 403}} = - WebsocketClient.connect(self(), uri(tenant), @serializer, [{"x-api-key", token}]) + WebsocketClient.connect(self(), uri(tenant, serializer), serializer, [{"x-api-key", token}]) end - test "handles connection with valid api-header but ignorable access_token payload", %{tenant: tenant, topic: topic} do + test "handles connection with valid api-header but ignorable access_token payload", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do realtime_topic = "realtime:#{topic}" log = @@ -955,7 +951,7 @@ defmodule Realtime.Integration.RtChannelTest do sub: random_string() }) - {:ok, socket} = WebsocketClient.connect(self(), uri(tenant), @serializer, [{"x-api-key", token}]) + {:ok, socket} = WebsocketClient.connect(self(), uri(tenant, serializer), serializer, [{"x-api-key", token}]) WebsocketClient.join(socket, realtime_topic, %{ config: %{broadcast: %{self: true}, private: false}, @@ -971,8 +967,8 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "on new access_token and channel is private policies are reevaluated for read policy", - %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") realtime_topic = "realtime:#{topic}" @@ -1002,9 +998,10 @@ defmodule 
Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "on new access_token and channel is private policies are reevaluated for write policy", %{ topic: topic, - tenant: tenant + tenant: tenant, + serializer: serializer } do - {socket, access_token} = get_connection(tenant, "authenticated") + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") realtime_topic = "realtime:#{topic}" config = %{broadcast: %{self: true}, private: true} WebsocketClient.join(socket, realtime_topic, %{config: config, access_token: access_token}) @@ -1041,8 +1038,12 @@ defmodule Realtime.Integration.RtChannelTest do 1500 end - test "on new access_token and channel is public policies are not reevaluated", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "on new access_token and channel is public policies are not reevaluated", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") {:ok, new_token} = token_valid(tenant, "anon") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1057,8 +1058,12 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{} end - test "on empty string access_token the socket sends an error message", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "on empty string access_token the socket sends an error message", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1083,10 +1088,14 @@ defmodule Realtime.Integration.RtChannelTest do assert msg =~ "The token provided is not a valid 
JWT" end - test "on expired access_token the socket sends an error message", %{tenant: tenant, topic: topic} do + test "on expired access_token the socket sends an error message", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do sub = random_string() - {socket, access_token} = get_connection(tenant, "authenticated", %{sub: sub}) + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated", claims: %{sub: sub}) config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1098,23 +1107,31 @@ defmodule Realtime.Integration.RtChannelTest do {:ok, token} = generate_token(tenant, %{:exp => System.system_time(:second) - 1000, sub: sub}) log = - capture_log([log_level: :warning], fn -> + capture_log(fn -> WebsocketClient.send_event(socket, realtime_topic, "access_token", %{"access_token" => token}) assert_receive %Message{ topic: ^realtime_topic, event: "system", - payload: %{"extension" => "system", "message" => "Token has expired 1000 seconds ago", "status" => "error"} + payload: %{"extension" => "system", "message" => "Token has expired " <> _, "status" => "error"} } + + assert_receive %Message{event: "phx_close", topic: ^realtime_topic} end) - assert log =~ "ChannelShutdown: Token has expired 1000 seconds ago" + assert log =~ "ChannelShutdown: Token has expired" end - test "ChannelShutdown include sub if available in jwt claims", %{tenant: tenant, topic: topic} do + test "ChannelShutdown include sub if available in jwt claims", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do exp = System.system_time(:second) + 10_000 - {socket, access_token} = get_connection(tenant, "authenticated", %{exp: exp}, %{log_level: :warning}) + {socket, access_token} = + get_connection(tenant, serializer, role: "authenticated", claims: %{exp: exp}, params: %{log_level: :warning}) + config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" sub = random_string() @@ 
-1126,14 +1143,15 @@ defmodule Realtime.Integration.RtChannelTest do WebsocketClient.send_event(socket, realtime_topic, "access_token", %{"access_token" => token}) assert_receive %Message{event: "system"}, 1000 + assert_receive %Message{event: "phx_close", topic: ^realtime_topic} end) assert log =~ "ChannelShutdown" assert log =~ "sub=#{sub}" end - test "missing claims close connection", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "missing claims close connection", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1160,8 +1178,8 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_close"} end - test "checks token periodically", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "checks token periodically", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1171,7 +1189,8 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 assert_receive %Message{event: "presence_state"}, 500 - {:ok, token} = generate_token(tenant, %{:exp => System.system_time(:second) + 2, role: "authenticated"}) + {:ok, token} = + generate_token(tenant, %{:exp => System.system_time(:second) + 2, role: "authenticated"}) # Update token to be a near expiring token WebsocketClient.send_event(socket, realtime_topic, "access_token", %{"access_token" => token}) @@ -1188,8 +1207,8 @@ defmodule Realtime.Integration.RtChannelTest do assert msg =~ "Token has expired" end - test "token expires in between 
joins", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "token expires in between joins", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1198,7 +1217,8 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 assert_receive %Message{event: "presence_state"}, 500 - {:ok, access_token} = generate_token(tenant, %{:exp => System.system_time(:second) + 1, role: "authenticated"}) + {:ok, access_token} = + generate_token(tenant, %{:exp => System.system_time(:second) + 1, role: "authenticated"}) # token expires in between joins so it needs to be handled by the channel and not the socket Process.sleep(1000) @@ -1223,8 +1243,8 @@ defmodule Realtime.Integration.RtChannelTest do assert log =~ "#{tenant.external_id}" end - test "token loses claims in between joins", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "token loses claims in between joins", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1254,8 +1274,8 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_close"} end - test "token is badly formatted in between joins", %{tenant: tenant, topic: topic} do - {socket, access_token} = get_connection(tenant, "authenticated") + test "token is badly formatted in between joins", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, access_token} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} 
realtime_topic = "realtime:#{topic}" @@ -1282,56 +1302,18 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_close"} end - @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] - test "handles RPC error on token refreshed", %{tenant: tenant, topic: topic} do - Authorization - |> expect(:get_read_authorizations, fn conn, db_conn, context -> - call_original(Authorization, :get_read_authorizations, [conn, db_conn, context]) - end) - |> expect(:get_read_authorizations, fn _, _, _ -> {:error, "RPC Error"} end) - - {socket, access_token} = get_connection(tenant, "authenticated") - config = %{broadcast: %{self: true}, private: true} - realtime_topic = "realtime:#{topic}" - - WebsocketClient.join(socket, realtime_topic, %{config: config, access_token: access_token}) - - assert_receive %Phoenix.Socket.Message{event: "phx_reply"}, 500 - assert_receive %Phoenix.Socket.Message{event: "presence_state"}, 500 - - # Update token to force update - {:ok, access_token} = - generate_token(tenant, %{:exp => System.system_time(:second) + 1000, role: "authenticated"}) - - log = - capture_log([log_level: :warning], fn -> - WebsocketClient.send_event(socket, realtime_topic, "access_token", %{"access_token" => access_token}) - - assert_receive %Phoenix.Socket.Message{ - event: "system", - payload: %{ - "status" => "error", - "extension" => "system", - "message" => "Realtime was unable to connect to the project database" - }, - topic: ^realtime_topic - }, - 500 - - assert_receive %Phoenix.Socket.Message{event: "phx_close", topic: ^realtime_topic} - end) - - assert log =~ "Realtime was unable to connect to the project database" - end - test "on sb prefixed access_token the socket ignores the message and respects JWT expiry time", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do sub = random_string() {socket, access_token} = - get_connection(tenant, "authenticated", %{sub: sub, 
exp: System.system_time(:second) + 5}) + get_connection(tenant, serializer, + role: "authenticated", + claims: %{sub: sub, exp: System.system_time(:second) + 5} + ) config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1369,9 +1351,10 @@ defmodule Realtime.Integration.RtChannelTest do tenant: tenant, topic: topic, db_conn: db_conn, - table_name: table_name + table_name: table_name, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" @@ -1409,10 +1392,11 @@ defmodule Realtime.Integration.RtChannelTest do tenant: tenant, topic: topic, db_conn: db_conn, - table_name: table_name + table_name: table_name, + serializer: serializer } do value = random_string() - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" @@ -1452,9 +1436,10 @@ defmodule Realtime.Integration.RtChannelTest do tenant: tenant, topic: topic, db_conn: db_conn, - table_name: table_name + table_name: table_name, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" @@ -1492,9 +1477,10 @@ defmodule Realtime.Integration.RtChannelTest do test "broadcast event when function 'send' is called with private topic", %{ tenant: tenant, topic: topic, - db_conn: db_conn + db_conn: db_conn, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} full_topic = "realtime:#{topic}" @@ -1529,9 +1515,10 @@ 
defmodule Realtime.Integration.RtChannelTest do test "broadcast event when function 'send' is called with public topic", %{ tenant: tenant, topic: topic, - db_conn: db_conn + db_conn: db_conn, + serializer: serializer } do - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} full_topic = "realtime:#{topic}" @@ -1568,11 +1555,11 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "user with only private channels enabled will not be able to join public channels", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do change_tenant_configuration(tenant, :private_only, true) - on_exit(fn -> change_tenant_configuration(tenant, :private_only, false) end) - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} topic = "realtime:#{topic}" @@ -1593,14 +1580,14 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "user with only private channels enabled will be able to join private channels", %{ tenant: tenant, - topic: topic + topic: topic, + serializer: serializer } do change_tenant_configuration(tenant, :private_only, true) - on_exit(fn -> change_tenant_configuration(tenant, :private_only, false) end) Process.sleep(100) - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: true} topic = "realtime:#{topic}" WebsocketClient.join(socket, topic, %{config: config}) @@ -1612,21 +1599,19 @@ defmodule Realtime.Integration.RtChannelTest do describe "socket disconnect" do 
setup [:rls_context] - test "tenant already suspended", %{topic: _topic} do - tenant = Containers.checkout_tenant(run_migrations: true) - + test "tenant already suspended", %{tenant: tenant, serializer: serializer} do log = capture_log(fn -> - {:ok, _} = Realtime.Api.update_tenant(tenant, %{suspend: true}) - {:error, %Mint.WebSocket.UpgradeFailureError{}} = get_connection(tenant, "anon") + change_tenant_configuration(tenant, :suspend, true) + {:error, %Mint.WebSocket.UpgradeFailureError{}} = get_connection(tenant, serializer, role: "anon") refute_receive _any end) assert log =~ "RealtimeDisabledForTenant" end - test "on jwks the socket closes and sends a system message", %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + test "on jwks the socket closes and sends a system message", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1634,14 +1619,17 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 assert_receive %Message{event: "presence_state"}, 500 - tenant = Tenants.get_tenant_by_external_id(tenant.external_id) - Realtime.Api.update_tenant(tenant, %{jwt_jwks: %{keys: ["potato"]}}) + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{jwt_jwks: %{keys: ["potato"]}}) assert_process_down(socket) end - test "on jwt_secret the socket closes and sends a system message", %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + test "on jwt_secret the socket closes and sends a system message", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ 
-1650,14 +1638,16 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 assert_receive %Message{event: "presence_state"}, 500 - tenant = Tenants.get_tenant_by_external_id(tenant.external_id) - Realtime.Api.update_tenant(tenant, %{jwt_secret: "potato"}) - + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{jwt_secret: "potato"}) assert_process_down(socket) end - test "on private_only the socket closes and sends a system message", %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + test "on private_only the socket closes and sends a system message", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1666,14 +1656,16 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 assert_receive %Message{event: "presence_state"}, 500 - tenant = Tenants.get_tenant_by_external_id(tenant.external_id) - Realtime.Api.update_tenant(tenant, %{private_only: true}) - + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{private_only: true}) assert_process_down(socket) end - test "on other param changes the socket won't close and no message is sent", %{tenant: tenant, topic: topic} do - {socket, _} = get_connection(tenant, "authenticated") + test "on other param changes the socket won't close and no message is sent", %{ + tenant: tenant, + topic: topic, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{topic}" @@ -1682,8 +1674,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 
assert_receive %Message{event: "presence_state"}, 500 - tenant = Tenants.get_tenant_by_external_id(tenant.external_id) - Realtime.Api.update_tenant(tenant, %{max_concurrent_users: 100}) + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{max_concurrent_users: 100}) refute_receive %Message{ topic: ^realtime_topic, @@ -1700,17 +1691,24 @@ defmodule Realtime.Integration.RtChannelTest do assert :ok = WebsocketClient.send_heartbeat(socket) end - test "invalid JWT with expired token", %{tenant: tenant} do + test "invalid JWT with expired token", %{tenant: tenant, serializer: serializer} do log = capture_log(fn -> - get_connection(tenant, "authenticated", %{:exp => System.system_time(:second) - 1000}, %{log_level: :info}) + get_connection(tenant, serializer, + role: "authenticated", + claims: %{:exp => System.system_time(:second) - 1000}, + params: %{log_level: :info} + ) end) assert log =~ "InvalidJWTToken: Token has expired" end - test "check registry of SocketDisconnect and on distribution called, kill socket", %{tenant: tenant} do - {socket, _} = get_connection(tenant, "authenticated") + test "check registry of SocketDisconnect and on distribution called, kill socket", %{ + tenant: tenant, + serializer: serializer + } do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} for _ <- 1..10 do @@ -1732,11 +1730,11 @@ defmodule Realtime.Integration.RtChannelTest do describe "rate limits" do setup [:rls_context] - test "max_concurrent_users limit respected", %{tenant: tenant} do + test "max_concurrent_users limit respected", %{tenant: tenant, serializer: serializer} do %{max_concurrent_users: max_concurrent_users} = Tenants.get_tenant_by_external_id(tenant.external_id) change_tenant_configuration(tenant, :max_concurrent_users, 1) - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: 
true}, private: false} realtime_topic = "realtime:#{random_string()}" WebsocketClient.join(socket, realtime_topic, %{config: config}) @@ -1758,14 +1756,12 @@ defmodule Realtime.Integration.RtChannelTest do change_tenant_configuration(tenant, :max_concurrent_users, max_concurrent_users) end - test "max_events_per_second limit respected", %{tenant: tenant} do - %{max_events_per_second: max_events_per_second} = Tenants.get_tenant_by_external_id(tenant.external_id) - on_exit(fn -> change_tenant_configuration(tenant, :max_events_per_second, max_events_per_second) end) - RateCounter.stop(tenant.external_id) + test "max_events_per_second limit respected", %{tenant: tenant, serializer: serializer} do + RateCounterHelper.stop(tenant.external_id) log = capture_log(fn -> - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} realtime_topic = "realtime:#{random_string()}" @@ -1785,11 +1781,10 @@ defmodule Realtime.Integration.RtChannelTest do assert log =~ "MessagePerSecondRateLimitReached" end - test "max_channels_per_client limit respected", %{tenant: tenant} do - %{max_events_per_second: max_concurrent_users} = Tenants.get_tenant_by_external_id(tenant.external_id) + test "max_channels_per_client limit respected", %{tenant: tenant, serializer: serializer} do change_tenant_configuration(tenant, :max_channels_per_client, 1) - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic_1 = "realtime:#{random_string()}" realtime_topic_2 = "realtime:#{random_string()}" @@ -1820,12 +1815,10 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{event: "phx_reply", topic: ^realtime_topic_2}, 500 refute_receive %Message{event: "presence_state", topic: ^realtime_topic_2}, 500 
- - change_tenant_configuration(tenant, :max_channels_per_client, max_concurrent_users) end - test "max_joins_per_second limit respected", %{tenant: tenant} do - {socket, _} = get_connection(tenant, "authenticated") + test "max_joins_per_second limit respected", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:#{random_string()}" @@ -1838,6 +1831,7 @@ defmodule Realtime.Integration.RtChannelTest do # Wait for RateCounter tick Process.sleep(1000) + # These ones will be blocked for _ <- 1..300 do WebsocketClient.join(socket, realtime_topic, %{config: config}) @@ -1858,9 +1852,8 @@ defmodule Realtime.Integration.RtChannelTest do assert log =~ "project=#{tenant.external_id} external_id=#{tenant.external_id} [critical] ClientJoinRateLimitReached: Too many joins per second" - # Only one log message should be emitted - # Splitting by the error message returns the error message and the rest of the log only - assert length(String.split(log, "ClientJoinRateLimitReached")) == 2 + # Only one or two log messages should be emitted + assert length(String.split(log, "ClientJoinRateLimitReached")) <= 3 end end @@ -1868,8 +1861,8 @@ defmodule Realtime.Integration.RtChannelTest do setup [:rls_context] @tag policies: [:read_matching_user_role, :write_matching_user_role], role: "anon" - test "role policies are respected when accessing the channel", %{tenant: tenant} do - {socket, _} = get_connection(tenant, "anon") + test "role policies are respected when accessing the channel", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "anon") config = %{broadcast: %{self: true}, private: true, presence: %{enabled: false}} topic = random_string() realtime_topic = "realtime:#{topic}" @@ -1878,7 +1871,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", 
payload: %{"status" => "ok"}, topic: ^realtime_topic}, 500 - {socket, _} = get_connection(tenant, "potato") + {socket, _} = get_connection(tenant, serializer, role: "potato") topic = random_string() realtime_topic = "realtime:#{topic}" @@ -1888,8 +1881,8 @@ defmodule Realtime.Integration.RtChannelTest do @tag policies: [:authenticated_read_matching_user_sub, :authenticated_write_matching_user_sub], sub: Ecto.UUID.generate() - test "sub policies are respected when accessing the channel", %{tenant: tenant, sub: sub} do - {socket, _} = get_connection(tenant, "authenticated", %{sub: sub}) + test "sub policies are respected when accessing the channel", %{tenant: tenant, sub: sub, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated", claims: %{sub: sub}) config = %{broadcast: %{self: true}, private: true, presence: %{enabled: false}} topic = random_string() realtime_topic = "realtime:#{topic}" @@ -1898,7 +1891,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^realtime_topic}, 500 - {socket, _} = get_connection(tenant, "authenticated", %{sub: Ecto.UUID.generate()}) + {socket, _} = get_connection(tenant, serializer, role: "authenticated", claims: %{sub: Ecto.UUID.generate()}) topic = random_string() realtime_topic = "realtime:#{topic}" @@ -1906,11 +1899,9 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^realtime_topic}, 500 end - @tag role: "authenticated", - policies: [:broken_read_presence, :broken_write_presence] - - test "handle failing rls policy", %{tenant: tenant} do - {socket, _} = get_connection(tenant, "authenticated") + @tag role: "authenticated", policies: [:broken_read_presence, :broken_write_presence] + test "handle failing rls policy", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: 
"authenticated") config = %{broadcast: %{self: true}, private: true} topic = random_string() realtime_topic = "realtime:#{topic}" @@ -1940,8 +1931,8 @@ defmodule Realtime.Integration.RtChannelTest do end end - test "handle empty topic by closing the socket", %{tenant: tenant} do - {socket, _} = get_connection(tenant, "authenticated") + test "handle empty topic by closing the socket", %{tenant: tenant, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") config = %{broadcast: %{self: true}, private: false} realtime_topic = "realtime:" @@ -1962,20 +1953,35 @@ defmodule Realtime.Integration.RtChannelTest do refute_receive %Message{event: "presence_state"} end - def handle_telemetry(event, %{sum: sum}, metadata, _) do + def handle_telemetry(event, measurements, metadata, name) do tenant = metadata[:tenant] [key] = Enum.take(event, -1) + value = Map.get(measurements, :sum) || Map.get(measurements, :value) || Map.get(measurements, :size) || 0 + + Agent.update(name, fn state -> + state = + Map.put_new( + state, + tenant, + %{ + joins: 0, + events: 0, + db_events: 0, + presence_events: 0, + output_bytes: 0, + input_bytes: 0 + } + ) - Agent.update(TestCounter, fn state -> - state = Map.put_new(state, tenant, %{joins: 0, events: 0, db_events: 0, presence_events: 0}) - update_in(state, [metadata[:tenant], key], fn v -> (v || 0) + sum end) + update_in(state, [metadata[:tenant], key], fn v -> (v || 0) + value end) end) end defp get_count(event, tenant) do [key] = Enum.take(event, -1) - Agent.get(TestCounter, fn state -> get_in(state, [tenant, key]) || 0 end) + :"TestCounter_#{tenant}" + |> Agent.get(fn state -> get_in(state, [tenant, key]) || 0 end) end describe "billable events" do @@ -1987,45 +1993,24 @@ defmodule Realtime.Integration.RtChannelTest do [:realtime, :rate_counter, :channel, :presence_events] ] + name = :"TestCounter_#{tenant.external_id}" + {:ok, _} = start_supervised(%{ id: 1, - start: {Agent, :start_link, [fn -> 
%{} end, [name: TestCounter]]} + start: {Agent, :start_link, [fn -> %{} end, [name: name]]} }) - RateCounter.stop(tenant.external_id) - on_exit(fn -> :telemetry.detach(__MODULE__) end) - :telemetry.attach_many(__MODULE__, events, &__MODULE__.handle_telemetry/4, []) - - {:ok, conn} = Database.connect(tenant, "realtime_test") - - # Setup for postgres changes - Database.transaction(conn, fn db_conn -> - queries = [ - "drop table if exists public.test", - "drop publication if exists supabase_realtime_test", - "create sequence if not exists test_id_seq;", - """ - create table if not exists "public"."test" ( - "id" int4 not null default nextval('test_id_seq'::regclass), - "details" text, - primary key ("id")); - """, - "grant all on table public.test to anon;", - "grant all on table public.test to postgres;", - "grant all on table public.test to authenticated;", - "create publication supabase_realtime_test for all tables" - ] - - Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) - end) + RateCounterHelper.stop(tenant.external_id) + on_exit(fn -> :telemetry.detach({__MODULE__, tenant.external_id}) end) + :telemetry.attach_many({__MODULE__, tenant.external_id}, events, &__MODULE__.handle_telemetry/4, name) :ok end - test "join events", %{tenant: tenant} do + test "join events", %{tenant: tenant, serializer: serializer} do external_id = tenant.external_id - {socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}, postgres_changes: [%{event: "*", schema: "public"}]} topic = "realtime:any" @@ -2037,7 +2022,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{topic: ^topic, event: "system"}, 5000 # Wait for RateCounter to run - Process.sleep(2000) + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) # Expected billed # 1 joins due to two sockets @@ -2050,21 +2035,21 @@ defmodule Realtime.Integration.RtChannelTest do assert 0 = get_count([:realtime, :rate_counter, 
:channel, :events], external_id) end - test "broadcast events", %{tenant: tenant} do + test "broadcast events", %{tenant: tenant, serializer: serializer} do external_id = tenant.external_id - {socket, _} = get_connection(tenant) + {socket1, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}} topic = "realtime:any" - WebsocketClient.join(socket, topic, %{config: config}) + WebsocketClient.join(socket1, topic, %{config: config}) # Join events assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{topic: ^topic, event: "presence_state"} # Add second client so we can test the "multiplication" of billable events - {socket, _} = get_connection(tenant) - WebsocketClient.join(socket, topic, %{config: config}) + {socket2, _} = get_connection(tenant, serializer) + WebsocketClient.join(socket2, topic, %{config: config}) # Join events assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 @@ -2074,12 +2059,16 @@ defmodule Realtime.Integration.RtChannelTest do payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} for _ <- 1..5 do - WebsocketClient.send_event(socket, topic, "broadcast", payload) + WebsocketClient.send_event(socket1, topic, "broadcast", payload) + # both sockets + assert_receive %Message{topic: ^topic, event: "broadcast", payload: ^payload} assert_receive %Message{topic: ^topic, event: "broadcast", payload: ^payload} end + refute_receive _any + # Wait for RateCounter to run - Process.sleep(2000) + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) # Expected billed # 2 joins due to two sockets @@ -2092,9 +2081,9 @@ defmodule Realtime.Integration.RtChannelTest do assert 15 = get_count([:realtime, :rate_counter, :channel, :events], external_id) end - test "presence events", %{tenant: tenant} do + test "presence events", %{tenant: tenant, serializer: serializer} do external_id = tenant.external_id - 
{socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}, presence: %{enabled: true}} topic = "realtime:any" @@ -2114,7 +2103,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "presence_diff", payload: %{"joins" => _, "leaves" => %{}}, topic: ^topic} # Presence events - {socket, _} = get_connection(tenant, "authenticated") + {socket, _} = get_connection(tenant, serializer, role: "authenticated") WebsocketClient.join(socket, topic, %{config: config}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 @@ -2131,7 +2120,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{event: "presence_diff", payload: %{"joins" => _, "leaves" => %{}}, topic: ^topic} # Wait for RateCounter to run - Process.sleep(2000) + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) # Expected billed # 2 joins due to two sockets @@ -2144,9 +2133,9 @@ defmodule Realtime.Integration.RtChannelTest do assert 0 = get_count([:realtime, :rate_counter, :channel, :events], external_id) end - test "postgres changes events", %{tenant: tenant} do + test "postgres changes events", %{tenant: tenant, serializer: serializer} do external_id = tenant.external_id - {socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}, postgres_changes: [%{event: "*", schema: "public"}]} topic = "realtime:any" @@ -2158,7 +2147,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{topic: ^topic, event: "system"}, 5000 # Add second user to test the "multiplication" of billable events - {socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) WebsocketClient.join(socket, topic, %{config: config}) assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}, topic: ^topic}, 300 assert_receive %Message{topic: ^topic, event: 
"presence_state"}, 500 @@ -2170,7 +2159,7 @@ defmodule Realtime.Integration.RtChannelTest do # Postgres Change events for _ <- 1..5, do: Postgrex.query!(conn, "insert into test (details) values ('test')", []) - for _ <- 1..5 do + for _ <- 1..10 do assert_receive %Message{ topic: ^topic, event: "postgres_changes", @@ -2180,7 +2169,7 @@ defmodule Realtime.Integration.RtChannelTest do end # Wait for RateCounter to run - Process.sleep(2000) + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) # Expected billed # 2 joins due to two sockets @@ -2189,13 +2178,14 @@ defmodule Realtime.Integration.RtChannelTest do # 0 events as no broadcast used assert 2 = get_count([:realtime, :rate_counter, :channel, :joins], external_id) assert 2 = get_count([:realtime, :rate_counter, :channel, :presence_events], external_id) + # (5 for each websocket) assert 10 = get_count([:realtime, :rate_counter, :channel, :db_events], external_id) assert 0 = get_count([:realtime, :rate_counter, :channel, :events], external_id) end - test "postgres changes error events", %{tenant: tenant} do + test "postgres changes error events", %{tenant: tenant, serializer: serializer} do external_id = tenant.external_id - {socket, _} = get_connection(tenant) + {socket, _} = get_connection(tenant, serializer) config = %{broadcast: %{self: true}, postgres_changes: [%{event: "*", schema: "none"}]} topic = "realtime:any" @@ -2207,7 +2197,7 @@ defmodule Realtime.Integration.RtChannelTest do assert_receive %Message{topic: ^topic, event: "system"}, 5000 # Wait for RateCounter to run - Process.sleep(2000) + RateCounterHelper.tick_tenant_rate_counters!(tenant.external_id) # Expected billed # 1 joins due to one socket @@ -2221,126 +2211,133 @@ defmodule Realtime.Integration.RtChannelTest do end end - test "tracks and untracks properly channels", %{tenant: tenant} do - assert [] = Tracker.list_pids() + describe "WAL bloat handling" do + setup %{tenant: tenant} do + topic = random_string() + {:ok, db_conn} = 
Database.connect(tenant, "realtime_test", :stop) - {socket, _} = get_connection(tenant) - config = %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} + %{rows: [[max_wal_size]]} = Postgrex.query!(db_conn, "SHOW max_wal_size", []) + %{rows: [[wal_keep_size]]} = Postgrex.query!(db_conn, "SHOW wal_keep_size", []) + %{rows: [[max_slot_wal_keep_size]]} = Postgrex.query!(db_conn, "SHOW max_slot_wal_keep_size", []) - topics = - for _ <- 1..10 do - topic = "realtime:#{random_string()}" - :ok = WebsocketClient.join(socket, topic, %{config: config}) - assert_receive %Message{topic: ^topic, event: "phx_reply"}, 500 - topic - end + assert max_wal_size == "32MB" + assert wal_keep_size == "32MB" + assert max_slot_wal_keep_size == "32MB" - assert [{_pid, count}] = Tracker.list_pids() - assert count == length(topics) + Postgrex.query!(db_conn, "CREATE TABLE IF NOT EXISTS wal_test (id INT, data TEXT)", []) - for topic <- topics do - :ok = WebsocketClient.leave(socket, topic, %{}) - assert_receive %Message{topic: ^topic, event: "phx_close"}, 500 - end + Postgrex.query!( + db_conn, + """ + CREATE OR REPLACE FUNCTION wal_test_trigger_func() RETURNS TRIGGER AS $$ + BEGIN + PERFORM realtime.send(json_build_object ('value', 'test' :: text)::jsonb, 'test', '#{topic}', false); + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """, + [] + ) - # wait to trigger tracker - assert_process_down(socket, 5000) - assert [] = Tracker.list_pids() - end + Postgrex.query!(db_conn, "DROP TRIGGER IF EXISTS wal_test_trigger ON wal_test", []) - test "failed connections are present in tracker with counter counter lower than 0 so they are actioned on by tracker", - %{tenant: tenant} do - assert [] = Tracker.list_pids() + Postgrex.query!( + db_conn, + """ + CREATE TRIGGER wal_test_trigger + AFTER INSERT OR UPDATE OR DELETE ON wal_test + FOR EACH ROW + EXECUTE FUNCTION wal_test_trigger_func() + """, + [] + ) - {socket, _} = get_connection(tenant) - config = %{broadcast: %{self: true}, 
private: true, presence: %{enabled: false}} + GenServer.stop(db_conn) - for _ <- 1..10 do - topic = "realtime:#{random_string()}" - :ok = WebsocketClient.join(socket, topic, %{config: config}) - assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "error"}}, 500 - end + on_exit(fn -> + {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) - assert [{_pid, count}] = Tracker.list_pids() - assert count == 0 - end + Postgrex.query!(db_conn, "DROP TABLE IF EXISTS wal_test CASCADE", []) + end) - test "failed connections but one succeeds properly tracks", - %{tenant: tenant} do - assert [] = Tracker.list_pids() + %{topic: topic} + end - {socket, _} = get_connection(tenant) - topic = "realtime:#{random_string()}" + test "track PID changes during WAL bloat creation", %{tenant: tenant, topic: topic, serializer: serializer} do + {socket, _} = get_connection(tenant, serializer, role: "authenticated") + config = %{broadcast: %{self: true}, private: false} + full_topic = "realtime:#{topic}" - :ok = - WebsocketClient.join(socket, topic, %{ - config: %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} - }) + active_slot_query = + "SELECT active_pid FROM pg_replication_slots where active_pid is not null and slot_name = 'supabase_realtime_messages_replication_slot_'" - assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "ok"}}, 500 - assert [{_pid, count}] = Tracker.list_pids() - assert count == 1 + WebsocketClient.join(socket, full_topic, %{config: config}) - for _ <- 1..10 do - topic = "realtime:#{random_string()}" + assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}, 500 + assert_receive %Message{event: "presence_state"}, 500 - :ok = - WebsocketClient.join(socket, topic, %{ - config: %{broadcast: %{self: true}, private: true, presence: %{enabled: false}} - }) + assert Connect.ready?(tenant.external_id) - assert_receive %Message{topic: ^topic, event: "phx_reply", 
payload: %{"status" => "error"}}, 500 - end + {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) - topic = "realtime:#{random_string()}" + original_connect_pid = Connect.whereis(tenant.external_id) + original_replication_pid = ReplicationConnection.whereis(tenant.external_id) + %{rows: [[original_db_pid]]} = Postgrex.query!(db_conn, active_slot_query, []) - :ok = - WebsocketClient.join(socket, topic, %{ - config: %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} - }) + tasks = + for _ <- 1..5 do + Task.async(fn -> + {:ok, bloat_conn} = Database.connect(tenant, "realtime_bloat", :stop) - assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "ok"}}, 500 - assert [{_pid, count}] = Tracker.list_pids() - assert count == 2 - end + Postgrex.transaction(bloat_conn, fn conn -> + Postgrex.query(conn, "INSERT INTO wal_test SELECT generate_series(1, 100000), repeat('x', 2000)", []) + {:error, "test"} + end) - defp mode(%{mode: :distributed}) do - tenant = Api.get_tenant_by_external_id("dev_tenant") + Process.exit(bloat_conn, :normal) + end) + end - RateCounter.stop(tenant.external_id) - :ets.delete_all_objects(Tracker.table_name()) + Task.await_many(tasks, 20000) - Connect.shutdown(tenant.external_id) - # Sleeping so that syn can forget about this Connect process - Process.sleep(100) + # Kill all pending transactions still running + Postgrex.query!( + db_conn, + "SELECT pg_terminate_backend(pid) from pg_stat_activity where application_name='realtime_bloat'", + [] + ) - on_exit(fn -> - Connect.shutdown(tenant.external_id) - # Sleeping so that syn can forget about this Connect process - Process.sleep(100) - end) + # Does it recover? 
+ assert Connect.ready?(tenant.external_id) + {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) + Process.sleep(1000) + %{rows: [[new_db_pid]]} = Postgrex.query!(db_conn, active_slot_query, []) - on_exit(fn -> Connect.shutdown(tenant.external_id) end) - {:ok, node} = Clustered.start() - region = Tenants.region(tenant) - {:ok, db_conn} = :erpc.call(node, Connect, :connect, ["dev_tenant", region]) - assert Connect.ready?(tenant.external_id) + assert new_db_pid != original_db_pid + assert ^original_connect_pid = Connect.whereis(tenant.external_id) + assert original_replication_pid != ReplicationConnection.whereis(tenant.external_id) - assert node(db_conn) == node - %{db_conn: db_conn, node: node, tenant: tenant} - end + # Check if socket is still connected + payload = %{"event" => "TEST", "payload" => %{"msg" => 1}, "type" => "broadcast"} + WebsocketClient.send_event(socket, full_topic, "broadcast", payload) + assert_receive %Message{event: "broadcast", payload: ^payload, topic: ^full_topic}, 500 - defp mode(_) do - tenant = Containers.checkout_tenant(run_migrations: true) - RateCounter.stop(tenant.external_id) + # Check if we are receiving the message from replication connection + Postgrex.query!(db_conn, "INSERT INTO wal_test VALUES (1, 'test')", []) - :ets.delete_all_objects(Tracker.table_name()) - Realtime.Tenants.Connect.shutdown(tenant.external_id) - # Sleeping so that syn can forget about this Connect process - Process.sleep(100) - {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) - assert Connect.ready?(tenant.external_id) - %{db_conn: db_conn, tenant: tenant} + assert_receive %Message{ + event: "broadcast", + payload: %{ + "event" => "test", + "payload" => %{"value" => "test"}, + "type" => "broadcast" + }, + join_ref: nil, + ref: nil, + topic: ^full_topic + }, + 5000 + end end defp rls_context(%{tenant: tenant} = context) do @@ -2399,15 +2396,16 @@ defmodule Realtime.Integration.RtChannelTest do end defp 
change_tenant_configuration(%Tenant{external_id: external_id}, limit, value) do - external_id - |> Realtime.Tenants.get_tenant_by_external_id() - |> Realtime.Api.Tenant.changeset(%{limit => value}) - |> Realtime.Repo.update!() + tenant = + external_id + |> Realtime.Tenants.get_tenant_by_external_id() + |> Realtime.Api.Tenant.changeset(%{limit => value}) + |> Realtime.Repo.update!() - Realtime.Tenants.Cache.invalidate_tenant_cache(external_id) + Realtime.Tenants.Cache.update_cache(tenant) end - defp assert_process_down(pid, timeout \\ 100) do + defp assert_process_down(pid, timeout \\ 1000) do ref = Process.monitor(pid) assert_receive {:DOWN, ^ref, :process, ^pid, _reason}, timeout end diff --git a/test/integration/tests.ts b/test/integration/tests.ts new file mode 100644 index 000000000..036255f17 --- /dev/null +++ b/test/integration/tests.ts @@ -0,0 +1,204 @@ +import { RealtimeClient } from "npm:@supabase/supabase-js@latest"; +import { sleep } from "https://deno.land/x/sleep/mod.ts"; +import { describe, it } from "jsr:@std/testing/bdd"; +import { assertEquals } from "jsr:@std/assert"; +import { deadline } from "jsr:@std/async/deadline"; + +const withDeadline = Promise>(fn: Fn, ms: number): Fn => + ((...args) => deadline(fn(...args), ms)) as Fn; + +const url = "http://realtime-dev.localhost:4100/socket"; +const serviceRoleKey = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjIwNzU3NzYzODIsInJlZiI6IjEyNy4wLjAuMSIsInJvbGUiOiJzZXJ2aWNlX3JvbGUiLCJpYXQiOjE3NjA3NzYzODJ9.nupH8pnrOTgK9Xaq8-D4Ry-yQ-PnlXEagTVywQUJVIE" +const apiKey = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjIwNzU2NjE3MjEsInJlZiI6IjEyNy4wLjAuMSIsInJvbGUiOiJhdXRoZW50aWNhdGVkIiwiaWF0IjoxNzYwNjYxNzIxfQ.PxpBoelC9vWQ2OVhmwKBUDEIKgX7MpgSdsnmXw7UdYk"; + +const realtimeV1 = { vsn: '1.0.0', params: { apikey: apiKey } , heartbeatIntervalMs: 5000, timeout: 5000 }; +const realtimeV2 = { vsn: '2.0.0', params: { apikey: apiKey } , heartbeatIntervalMs: 5000, timeout: 5000 }; +const realtimeServiceRole = { vsn: 
'2.0.0', logger: console.log, params: { apikey: serviceRoleKey } , heartbeatIntervalMs: 5000, timeout: 5000 }; + +let clientV1: RealtimeClient | null; +let clientV2: RealtimeClient | null; + +describe("broadcast extension", { sanitizeOps: false, sanitizeResources: false }, () => { + it("users with different versions can receive self broadcast", withDeadline(async () => { + clientV1 = new RealtimeClient(url, realtimeV1) + clientV2 = new RealtimeClient(url, realtimeV2) + let resultV1 = null; + let resultV2 = null; + let event = crypto.randomUUID(); + let topic = "topic:" + crypto.randomUUID(); + let expectedPayload = { message: crypto.randomUUID() }; + const config = { config: { broadcast: { ack: true, self: true } } }; + + const channelV1 = clientV1 + .channel(topic, config) + .on("broadcast", { event }, ({ payload }) => (resultV1 = payload)) + .subscribe(); + + const channelV2 = clientV2 + .channel(topic, config) + .on("broadcast", { event }, ({ payload }) => (resultV2 = payload)) + .subscribe(); + + while (channelV1.state != "joined" || channelV2.state != "joined") await sleep(0.2); + + // Send from V1 client - both should receive + await channelV1.send({ + type: "broadcast", + event, + payload: expectedPayload, + }); + + while (resultV1 == null || resultV2 == null) await sleep(0.2); + + assertEquals(resultV1, expectedPayload); + assertEquals(resultV2, expectedPayload); + + // Reset results for second test + resultV1 = null; + resultV2 = null; + let expectedPayload2 = { message: crypto.randomUUID() }; + + // Send from V2 client - both should receive + await channelV2.send({ + type: "broadcast", + event, + payload: expectedPayload2, + }); + + while (resultV1 == null || resultV2 == null) await sleep(0.2); + + assertEquals(resultV1, expectedPayload2); + assertEquals(resultV2, expectedPayload2); + + await channelV1.unsubscribe(); + await channelV2.unsubscribe(); + + await stopClient(clientV1); + await stopClient(clientV2); + clientV1 = null; + clientV2 = null; + }, 
5000)); + + it("v2 can send/receive binary payload", withDeadline(async () => { + clientV2 = new RealtimeClient(url, realtimeV2) + let result = null; + let event = crypto.randomUUID(); + let topic = "topic:" + crypto.randomUUID(); + const expectedPayload = new ArrayBuffer(2); + const uint8 = new Uint8Array(expectedPayload); // View the buffer as unsigned 8-bit integers + uint8[0] = 125; + uint8[1] = 255; + + const config = { config: { broadcast: { ack: true, self: true } } }; + + const channelV2 = clientV2 + .channel(topic, config) + .on("broadcast", { event }, ({ payload }) => (result = payload)) + .subscribe(); + + while (channelV2.state != "joined") await sleep(0.2); + + await channelV2.send({ + type: "broadcast", + event, + payload: expectedPayload, + }); + + while (result == null) await sleep(0.2); + + assertEquals(result, expectedPayload); + + await channelV2.unsubscribe(); + + await stopClient(clientV2); + clientV2 = null; + }, 5000)); + + it("users with different versions can receive broadcasts from endpoint", withDeadline(async () => { + clientV1 = new RealtimeClient(url, realtimeV1) + clientV2 = new RealtimeClient(url, realtimeV2) + let resultV1 = null; + let resultV2 = null; + let event = crypto.randomUUID(); + let topic = "topic:" + crypto.randomUUID(); + let expectedPayload = { message: crypto.randomUUID() }; + const config = { config: { broadcast: { ack: true, self: true } } }; + + const channelV1 = clientV1 + .channel(topic, config) + .on("broadcast", { event }, ({ payload }) => (resultV1 = payload)) + .subscribe(); + + const channelV2 = clientV2 + .channel(topic, config) + .on("broadcast", { event }, ({ payload }) => (resultV2 = payload)) + .subscribe(); + + while (channelV1.state != "joined" || channelV2.state != "joined") await sleep(0.2); + + // Send from unsubscribed channel - both should receive + new RealtimeClient(url, realtimeServiceRole).channel(topic, config).httpSend(event, expectedPayload); + + while (resultV1 == null || resultV2 == 
null) await sleep(0.2); + + assertEquals(resultV1, expectedPayload); + assertEquals(resultV2, expectedPayload); + + await channelV1.unsubscribe(); + await channelV2.unsubscribe(); + + await stopClient(clientV1); + await stopClient(clientV2); + clientV1 = null; + clientV2 = null; + }, 5000)); +}); + +// describe("presence extension", () => { +// it("user is able to receive presence updates", async () => { +// let result: any = []; +// let error = null; +// let topic = "topic:" + crypto.randomUUID(); +// let keyV1 = "key V1"; +// let keyV2 = "key V2"; +// +// const configV1 = { config: { presence: { keyV1 } } }; +// const configV2 = { config: { presence: { keyV1 } } }; +// +// const channelV1 = clientV1 +// .channel(topic, configV1) +// .on("presence", { event: "join" }, ({ key, newPresences }) => +// result.push({ key, newPresences }) +// ) +// .subscribe(); +// +// const channelV2 = clientV2 +// .channel(topic, configV2) +// .on("presence", { event: "join" }, ({ key, newPresences }) => +// result.push({ key, newPresences }) +// ) +// .subscribe(); +// +// while (channelV1.state != "joined" || channelV2.state != "joined") await sleep(0.2); +// +// const resV1 = await channelV1.track({ key: keyV1 }); +// const resV2 = await channelV2.track({ key: keyV2 }); +// +// if (resV1 == "timed out" || resV2 == "timed out") error = resV1 || resV2; +// +// sleep(2.2); +// +// // FIXME write assertions +// console.log(result) +// let presences = result[0].newPresences[0]; +// assertEquals(result[0].key, keyV1); +// assertEquals(presences.message, message); +// assertEquals(error, null); +// }); +// }); + +async function stopClient(client: RealtimeClient | null) { + if (client) { + await client.removeAllChannels(); + } +} diff --git a/test/integration/tracker_test.exs b/test/integration/tracker_test.exs new file mode 100644 index 000000000..32b73f65a --- /dev/null +++ b/test/integration/tracker_test.exs @@ -0,0 +1,101 @@ +defmodule Integration.TrackerTest do + # Changing the 
Tracker ETS table + use RealtimeWeb.ConnCase, async: false + + alias RealtimeWeb.RealtimeChannel.Tracker + alias Phoenix.Socket.Message + alias Realtime.Tenants.Connect + alias Realtime.Integration.WebsocketClient + + setup do + tenant = Containers.checkout_tenant(run_migrations: true) + :ets.delete_all_objects(Tracker.table_name()) + + {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) + assert Connect.ready?(tenant.external_id) + %{db_conn: db_conn, tenant: tenant} + end + + test "tracks and untracks properly channels", %{tenant: tenant} do + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} + + topics = + for _ <- 1..10 do + topic = "realtime:#{random_string()}" + :ok = WebsocketClient.join(socket, topic, %{config: config}) + assert_receive %Message{topic: ^topic, event: "phx_reply"}, 500 + topic + end + + for topic <- topics do + :ok = WebsocketClient.leave(socket, topic, %{}) + assert_receive %Message{topic: ^topic, event: "phx_close"}, 500 + end + + start_supervised!({Tracker, check_interval_in_ms: 100}) + # wait to trigger tracker + assert_process_down(socket, 1000) + end + + test "failed connections are present in tracker with counter lower than 0 so they are actioned on by tracker", %{ + tenant: tenant + } do + assert [] = Tracker.list_pids() + + {socket, _} = get_connection(tenant) + config = %{broadcast: %{self: true}, private: true, presence: %{enabled: false}} + + for _ <- 1..10 do + topic = "realtime:#{random_string()}" + :ok = WebsocketClient.join(socket, topic, %{config: config}) + assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "error"}}, 500 + end + + assert [{_pid, count}] = Tracker.list_pids() + assert count == 0 + end + + test "failed connections but one succeeds properly tracks", %{tenant: tenant} do + assert [] = Tracker.list_pids() + + {socket, _} = get_connection(tenant) + topic = "realtime:#{random_string()}" + + 
:ok = + WebsocketClient.join(socket, topic, %{ + config: %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} + }) + + assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "ok"}}, 500 + assert [{_pid, count}] = Tracker.list_pids() + assert count == 1 + + for _ <- 1..10 do + topic = "realtime:#{random_string()}" + + :ok = + WebsocketClient.join(socket, topic, %{ + config: %{broadcast: %{self: true}, private: true, presence: %{enabled: false}} + }) + + assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "error"}}, 500 + end + + topic = "realtime:#{random_string()}" + + :ok = + WebsocketClient.join(socket, topic, %{ + config: %{broadcast: %{self: true}, private: false, presence: %{enabled: false}} + }) + + assert_receive %Message{topic: ^topic, event: "phx_reply", payload: %{"status" => "ok"}}, 500 + assert [{_pid, count}] = Tracker.list_pids() + assert count == 2 + end + + defp assert_process_down(pid, timeout) do + ref = Process.monitor(pid) + assert_receive {:DOWN, ^ref, :process, ^pid, _reason}, timeout + end +end diff --git a/test/realtime/api_test.exs b/test/realtime/api_test.exs index 1c4a816b0..06d554110 100644 --- a/test/realtime/api_test.exs +++ b/test/realtime/api_test.exs @@ -1,5 +1,5 @@ defmodule Realtime.ApiTest do - use Realtime.DataCase, async: false + use Realtime.DataCase, async: true use Mimic @@ -13,22 +13,24 @@ defmodule Realtime.ApiTest do @db_conf Application.compile_env(:realtime, Realtime.Repo) - setup do - tenant1 = Containers.checkout_tenant(run_migrations: true) - tenant2 = Containers.checkout_tenant(run_migrations: true) - Api.update_tenant(tenant1, %{max_concurrent_users: 10_000_000}) - Api.update_tenant(tenant2, %{max_concurrent_users: 20_000_000}) - - %{tenants: Api.list_tenants(), tenant: tenant1} + defp create_tenants(_) do + tenant1 = tenant_fixture(%{max_concurrent_users: 10_000_000}) + tenant2 = tenant_fixture(%{max_concurrent_users: 20_000_000}) + 
dev_tenant = Realtime.Api.get_tenant_by_external_id("dev_tenant") + %{tenants: [tenant1, tenant2, dev_tenant]} end describe "list_tenants/0" do + setup [:create_tenants] + test "returns all tenants", %{tenants: tenants} do assert Enum.sort(Api.list_tenants()) == Enum.sort(tenants) end end describe "list_tenants/1" do + setup [:create_tenants] + test "list_tenants/1 returns filtered tenants", %{tenants: tenants} do assert hd(Api.list_tenants(search: hd(tenants).external_id)) == hd(tenants) @@ -38,6 +40,8 @@ defmodule Realtime.ApiTest do end describe "get_tenant!/1" do + setup [:create_tenants] + test "returns the tenant with given id", %{tenants: [tenant | _]} do result = tenant.id |> Api.get_tenant!() |> Map.delete(:extensions) expected = tenant |> Map.delete(:extensions) @@ -51,6 +55,10 @@ defmodule Realtime.ApiTest do external_id = random_string() + expect(Realtime.Tenants.Cache, :global_cache_update, fn tenant -> + assert tenant.external_id == external_id + end) + valid_attrs = %{ external_id: external_id, name: external_id, @@ -85,11 +93,14 @@ defmodule Realtime.ApiTest do end test "invalid data returns error changeset" do + reject(&Realtime.Tenants.Cache.global_cache_update/1) assert {:error, %Ecto.Changeset{}} = Api.create_tenant(%{external_id: nil, jwt_secret: nil, name: nil}) end end - describe "get_tenant_by_external_id/1" do + describe "get_tenant_by_external_id/2" do + setup [:create_tenants] + test "fetch by external id", %{tenants: [tenant | _]} do %Tenant{extensions: [%Extensions{} = extension]} = Api.get_tenant_by_external_id(tenant.external_id) @@ -98,56 +109,78 @@ defmodule Realtime.ApiTest do password = extension.settings["db_password"] assert ^password = "v1QVng3N+pZd/0AEObABwg==" end + + test "fetch by external id using replica", %{tenants: [tenant | _]} do + %Tenant{extensions: [%Extensions{} = extension]} = + Api.get_tenant_by_external_id(tenant.external_id, use_replica?: true) + + assert Map.has_key?(extension.settings, "db_password") + 
password = extension.settings["db_password"] + assert ^password = "v1QVng3N+pZd/0AEObABwg==" + end + + test "fetch by external id using no replica", %{tenants: [tenant | _]} do + %Tenant{extensions: [%Extensions{} = extension]} = + Api.get_tenant_by_external_id(tenant.external_id, use_replica?: false) + + assert Map.has_key?(extension.settings, "db_password") + password = extension.settings["db_password"] + assert ^password = "v1QVng3N+pZd/0AEObABwg==" + end end - describe "update_tenant/2" do - test "valid data updates the tenant", %{tenant: tenant} do + describe "update_tenant_by_external_id/2" do + setup [:create_tenants] + + test "valid data updates the tenant using external_id", %{tenants: [tenant | _]} do update_attrs = %{ external_id: tenant.external_id, jwt_secret: "some updated jwt_secret", name: "some updated name" } - assert {:ok, %Tenant{} = tenant} = Api.update_tenant(tenant, update_attrs) + assert {:ok, %Tenant{} = tenant} = Api.update_tenant_by_external_id(tenant.external_id, update_attrs) assert tenant.external_id == tenant.external_id assert tenant.jwt_secret == Crypto.encrypt!("some updated jwt_secret") assert tenant.name == "some updated name" end - test "invalid data returns error changeset", %{tenant: tenant} do - assert {:error, %Ecto.Changeset{}} = Api.update_tenant(tenant, %{external_id: nil, jwt_secret: nil, name: nil}) + test "invalid data returns error changeset", %{tenants: [tenant | _]} do + assert {:error, %Ecto.Changeset{}} = + Api.update_tenant_by_external_id(tenant.external_id, %{external_id: nil, jwt_secret: nil, name: nil}) end - test "valid data and jwks change will send disconnect event", %{tenant: tenant} do + test "valid data and jwks change will send disconnect event", %{tenants: [tenant | _]} do :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, "realtime:operations:" <> tenant.external_id) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{jwt_jwks: %{keys: ["test"]}}) + assert {:ok, %Tenant{}} = 
Api.update_tenant_by_external_id(tenant.external_id, %{jwt_jwks: %{keys: ["test"]}}) assert_receive :disconnect, 500 end - test "valid data and jwt_secret change will send disconnect event", %{tenant: tenant} do + test "valid data and jwt_secret change will send disconnect event", %{tenants: [tenant | _]} do :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, "realtime:operations:" <> tenant.external_id) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{jwt_secret: "potato"}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{jwt_secret: "potato"}) assert_receive :disconnect, 500 end - test "valid data and suspend change will send disconnect event", %{tenant: tenant} do + test "valid data and suspend change will send disconnect event", %{tenants: [tenant | _]} do :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, "realtime:operations:" <> tenant.external_id) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{suspend: true}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{suspend: true}) assert_receive :disconnect, 500 end - test "valid data but not updating jwt_secret or jwt_jwks won't send event", %{tenant: tenant} do + test "valid data but not updating jwt_secret or jwt_jwks won't send event", %{tenants: [tenant | _]} do :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, "realtime:operations:" <> tenant.external_id) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{max_events_per_second: 100}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{max_events_per_second: 100}) refute_receive :disconnect, 500 end - test "valid data and jwt_secret change will restart the database connection", %{tenant: tenant} do + test "valid data and jwt_secret change will restart the database connection" do + tenant = Containers.checkout_tenant(run_migrations: true) {:ok, old_pid} = Connect.lookup_or_start_connection(tenant.external_id) Process.monitor(old_pid) - assert {:ok, 
%Tenant{}} = Api.update_tenant(tenant, %{jwt_secret: "potato"}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{jwt_secret: "potato"}) assert_receive {:DOWN, _, :process, ^old_pid, :shutdown}, 500 refute Process.alive?(old_pid) Process.sleep(100) @@ -155,28 +188,36 @@ defmodule Realtime.ApiTest do assert %Postgrex.Result{} = Postgrex.query!(new_pid, "SELECT 1", []) end - test "valid data and suspend change will restart the database connection", %{tenant: tenant} do + test "valid data and suspend change will restart the database connection" do + tenant = Containers.checkout_tenant(run_migrations: true) {:ok, old_pid} = Connect.lookup_or_start_connection(tenant.external_id) Process.monitor(old_pid) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{suspend: true}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{suspend: true}) assert_receive {:DOWN, _, :process, ^old_pid, :shutdown}, 500 refute Process.alive?(old_pid) Process.sleep(100) assert {:error, :tenant_suspended} = Connect.lookup_or_start_connection(tenant.external_id) end - test "valid data and tenant data change will not restart the database connection", %{tenant: tenant} do + test "valid data and tenant data change will not restart the database connection" do + tenant = Containers.checkout_tenant(run_migrations: true) + + expect(Realtime.Tenants.Cache, :global_cache_update, fn tenant -> + assert tenant.max_concurrent_users == 101 + end) + {:ok, old_pid} = Connect.lookup_or_start_connection(tenant.external_id) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{max_concurrent_users: 100}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{max_concurrent_users: 101}) refute_receive {:DOWN, _, :process, ^old_pid, :shutdown}, 500 assert Process.alive?(old_pid) assert {:ok, new_pid} = Connect.lookup_or_start_connection(tenant.external_id) assert old_pid == new_pid end - test "valid data and extensions 
data change will restart the database connection", %{tenant: tenant} do + test "valid data and extensions data change will restart the database connection" do + tenant = Containers.checkout_tenant(run_migrations: true) config = Realtime.Database.from_tenant(tenant, "realtime_test", :stop) extensions = [ @@ -200,7 +241,7 @@ defmodule Realtime.ApiTest do {:ok, old_pid} = Connect.lookup_or_start_connection(tenant.external_id) Process.monitor(old_pid) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{extensions: extensions}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{extensions: extensions}) assert_receive {:DOWN, _, :process, ^old_pid, :shutdown}, 500 refute Process.alive?(old_pid) Process.sleep(100) @@ -208,22 +249,17 @@ defmodule Realtime.ApiTest do assert %Postgrex.Result{} = Postgrex.query!(new_pid, "SELECT 1", []) end - test "valid data and change to tenant data will refresh cache", %{tenant: tenant} do - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{name: "new_name"}) - assert %Tenant{name: "new_name"} = Realtime.Tenants.Cache.get_tenant_by_external_id(tenant.external_id) - end + test "valid data and change to tenant data will refresh cache", %{tenants: [tenant | _]} do + expect(Realtime.Tenants.Cache, :global_cache_update, fn tenant -> + assert tenant.name == "new_name" + end) - test "valid data and no changes to tenant will not refresh cache", %{tenant: tenant} do - reject(&Realtime.Tenants.Cache.get_tenant_by_external_id/1) - assert {:ok, %Tenant{}} = Api.update_tenant(tenant, %{name: tenant.name}) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{name: "new_name"}) end - end - describe "delete_tenant/1" do - test "deletes the tenant" do - tenant = tenant_fixture() - assert {:ok, %Tenant{}} = Api.delete_tenant(tenant) - assert_raise Ecto.NoResultsError, fn -> Api.get_tenant!(tenant.id) end + test "valid data and no changes to tenant will not refresh cache", %{tenants: 
[tenant | _]} do + reject(&Realtime.Tenants.Cache.global_cache_update/1) + assert {:ok, %Tenant{}} = Api.update_tenant_by_external_id(tenant.external_id, %{name: tenant.name}) end end @@ -236,11 +272,9 @@ defmodule Realtime.ApiTest do end end - test "list_extensions/1 ", %{tenants: tenants} do - assert length(Api.list_extensions()) == length(tenants) - end - describe "preload_counters/1" do + setup [:create_tenants] + test "preloads counters for a given tenant ", %{tenants: [tenant | _]} do tenant = Repo.reload!(tenant) assert Api.preload_counters(nil) == nil @@ -256,6 +290,7 @@ defmodule Realtime.ApiTest do end describe "rename_settings_field/2" do + @tag skip: "** (Postgrex.Error) ERROR 0A000 (feature_not_supported) cached plan must not change result type" test "renames setting fields" do tenant = tenant_fixture() Api.rename_settings_field("poll_interval_ms", "poll_interval") @@ -340,4 +375,18 @@ defmodule Realtime.ApiTest do refute TestRequiresRestartingDbConnection.check(changeset) end end + + describe "update_migrations_ran/1" do + test "updates migrations_ran to the count of all migrations" do + tenant = tenant_fixture(%{migrations_ran: 0}) + + expect(Realtime.Tenants.Cache, :global_cache_update, fn tenant -> + assert tenant.migrations_ran == 1 + :ok + end) + + assert {:ok, tenant} = Api.update_migrations_ran(tenant.external_id, 1) + assert tenant.migrations_ran == 1 + end + end end diff --git a/test/realtime/database_distributed_test.exs b/test/realtime/database_distributed_test.exs new file mode 100644 index 000000000..43b40743e --- /dev/null +++ b/test/realtime/database_distributed_test.exs @@ -0,0 +1,100 @@ +defmodule Realtime.DatabaseDistributedTest do + # async: false due to usage of Clustered + dev_tenant + use Realtime.DataCase, async: false + + import ExUnit.CaptureLog + + alias Realtime.Database + alias Realtime.Rpc + alias Realtime.Tenants.Connect + + doctest Realtime.Database + def handle_telemetry(event, metadata, content, pid: pid), do: 
send(pid, {event, metadata, content}) + + setup do + tenant = Containers.checkout_tenant() + :telemetry.attach(__MODULE__, [:realtime, :database, :transaction], &__MODULE__.handle_telemetry/4, pid: self()) + + on_exit(fn -> :telemetry.detach(__MODULE__) end) + + %{tenant: tenant} + end + + @aux_mod (quote do + defmodule DatabaseAux do + def checker(transaction_conn) do + Postgrex.query!(transaction_conn, "SELECT 1", []) + end + + def error(transaction_conn) do + Postgrex.query!(transaction_conn, "SELECT 1/0", []) + end + + def exception(_) do + raise RuntimeError, "💣" + end + end + end) + + Code.eval_quoted(@aux_mod) + + describe "transaction/1 in clustered mode" do + setup do + Connect.shutdown("dev_tenant") + # Waiting for :syn to "unregister" if the Connect process was up + Process.sleep(100) + :ok + end + + test "success call returns output" do + {:ok, node} = Clustered.start(@aux_mod) + {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) + assert node(db_conn) == node + assert {:ok, %Postgrex.Result{rows: [[1]]}} = Database.transaction(db_conn, &DatabaseAux.checker/1) + end + + test "handles database errors" do + metadata = [external_id: "123", project: "123"] + {:ok, node} = Clustered.start(@aux_mod) + {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) + assert node(db_conn) == node + + assert capture_log(fn -> + assert {:error, %Postgrex.Error{}} = Database.transaction(db_conn, &DatabaseAux.error/1, [], metadata) + # We have to wait for logs to be relayed to this node + Process.sleep(100) + end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" + end + + test "handles exception" do + metadata = [external_id: "123", project: "123"] + {:ok, node} = Clustered.start(@aux_mod) + {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) + assert node(db_conn) == node + + assert capture_log(fn -> + assert {:error, %RuntimeError{}} = Database.transaction(db_conn, 
&DatabaseAux.exception/1, [], metadata) + # We have to wait for logs to be relayed to this node + Process.sleep(100) + end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" + end + + test "db process is not alive anymore" do + metadata = [external_id: "123", project: "123", tenant_id: "123"] + {:ok, node} = Clustered.start(@aux_mod) + # Grab a remote pid that will not exist. :erpc uses a new process to perform the call. + # Once it has returned the process is not alive anymore + + pid = Rpc.call(node, :erlang, :self, []) + assert node(pid) == node + + assert capture_log(fn -> + assert {:error, {:exit, {:noproc, {DBConnection.Holder, :checkout, [^pid, []]}}}} = + Database.transaction(pid, &DatabaseAux.checker/1, [], metadata) + + # We have to wait for logs to be relayed to this node + Process.sleep(100) + end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" + end + end +end diff --git a/test/realtime/database_test.exs b/test/realtime/database_test.exs index f48de14b6..f8e8c8b86 100644 --- a/test/realtime/database_test.exs +++ b/test/realtime/database_test.exs @@ -1,12 +1,9 @@ defmodule Realtime.DatabaseTest do - # async: false due to usage of Clustered - use Realtime.DataCase, async: false + use Realtime.DataCase, async: true import ExUnit.CaptureLog alias Realtime.Database - alias Realtime.Rpc - alias Realtime.Tenants.Connect doctest Realtime.Database def handle_telemetry(event, metadata, content, pid: pid), do: send(pid, {event, metadata, content}) @@ -48,8 +45,7 @@ defmodule Realtime.DatabaseTest do # Connection limit for docker tenant db is 100 @tag db_pool: 50, - subs_pool_size: 21, - subcriber_pool_size: 33 + subs_pool_size: 73 test "restricts connection if tenant database cannot receive more connections based on tenant pool", %{tenant: tenant} do assert capture_log(fn -> @@ -215,84 +211,6 @@ defmodule Realtime.DatabaseTest do end end - @aux_mod (quote do - defmodule DatabaseAux do - def checker(transaction_conn) do - 
Postgrex.query!(transaction_conn, "SELECT 1", []) - end - - def error(transaction_conn) do - Postgrex.query!(transaction_conn, "SELECT 1/0", []) - end - - def exception(_) do - raise RuntimeError, "💣" - end - end - end) - - Code.eval_quoted(@aux_mod) - - describe "transaction/1 in clustered mode" do - setup do - Connect.shutdown("dev_tenant") - # Waiting for :syn to "unregister" if the Connect process was up - Process.sleep(100) - :ok - end - - test "success call returns output" do - {:ok, node} = Clustered.start(@aux_mod) - {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) - assert node(db_conn) == node - assert {:ok, %Postgrex.Result{rows: [[1]]}} = Database.transaction(db_conn, &DatabaseAux.checker/1) - end - - test "handles database errors" do - metadata = [external_id: "123", project: "123"] - {:ok, node} = Clustered.start(@aux_mod) - {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) - assert node(db_conn) == node - - assert capture_log(fn -> - assert {:error, %Postgrex.Error{}} = Database.transaction(db_conn, &DatabaseAux.error/1, [], metadata) - # We have to wait for logs to be relayed to this node - Process.sleep(100) - end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" - end - - test "handles exception" do - metadata = [external_id: "123", project: "123"] - {:ok, node} = Clustered.start(@aux_mod) - {:ok, db_conn} = Rpc.call(node, Connect, :connect, ["dev_tenant", "us-east-1"]) - assert node(db_conn) == node - - assert capture_log(fn -> - assert {:error, %RuntimeError{}} = Database.transaction(db_conn, &DatabaseAux.exception/1, [], metadata) - # We have to wait for logs to be relayed to this node - Process.sleep(100) - end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" - end - - test "db process is not alive anymore" do - metadata = [external_id: "123", project: "123", tenant_id: "123"] - {:ok, node} = Clustered.start(@aux_mod) - # Grab a remote pid that 
will not exist. :erpc uses a new process to perform the call. - # Once it has returned the process is not alive anymore - - pid = Rpc.call(node, :erlang, :self, []) - assert node(pid) == node - - assert capture_log(fn -> - assert {:error, {:exit, {:noproc, {DBConnection.Holder, :checkout, [^pid, []]}}}} = - Database.transaction(pid, &DatabaseAux.checker/1, [], metadata) - - # We have to wait for logs to be relayed to this node - Process.sleep(100) - end) =~ "project=123 external_id=123 [error] ErrorExecutingTransaction:" - end - end - describe "pool_size_by_application_name/2" do test "returns the number of connections per application name" do assert Database.pool_size_by_application_name("realtime_connect", %{}) == 1 @@ -409,6 +327,6 @@ defmodule Realtime.DatabaseTest do put_in(extension, ["settings", "db_port"], db_port) ] - Realtime.Api.update_tenant(tenant, %{extensions: extensions}) + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{extensions: extensions}) end end diff --git a/test/realtime/extensions/cdc_rls/cdc_rls_test.exs b/test/realtime/extensions/cdc_rls/cdc_rls_test.exs index 5f341c134..77c54e4ae 100644 --- a/test/realtime/extensions/cdc_rls/cdc_rls_test.exs +++ b/test/realtime/extensions/cdc_rls/cdc_rls_test.exs @@ -1,7 +1,7 @@ defmodule Realtime.Extensions.CdcRlsTest do # async: false due to usage of dev_tenant # Also global mimic mock - use RealtimeWeb.ChannelCase, async: false + use Realtime.DataCase, async: false use Mimic import ExUnit.CaptureLog @@ -9,6 +9,7 @@ defmodule Realtime.Extensions.CdcRlsTest do setup :set_mimic_global alias Extensions.PostgresCdcRls + alias Extensions.PostgresCdcRls.Subscriptions alias PostgresCdcRls.SubscriptionManager alias Postgrex alias Realtime.Api @@ -24,76 +25,35 @@ defmodule Realtime.Extensions.CdcRlsTest do setup do tenant = Containers.checkout_tenant(run_migrations: true) - {:ok, conn} = Database.connect(tenant, "realtime_test") - - Database.transaction(conn, fn db_conn -> - queries = [ - "drop 
table if exists public.test", - "drop publication if exists supabase_realtime_test", - "create sequence if not exists test_id_seq;", - """ - create table if not exists "public"."test" ( - "id" int4 not null default nextval('test_id_seq'::regclass), - "details" text, - primary key ("id")); - """, - "grant all on table public.test to anon;", - "grant all on table public.test to postgres;", - "grant all on table public.test to authenticated;", - "create publication supabase_realtime_test for all tables" - ] - - Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) - end) - %Tenant{extensions: extensions, external_id: external_id} = tenant postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) args = Map.put(postgres_extension, "id", external_id) - pg_change_params = [ - %{ - id: UUID.uuid1(), - params: %{"event" => "*", "schema" => "public"}, - channel_pid: self(), - claims: %{ - "exp" => System.system_time(:second) + 100_000, - "iat" => 0, - "ref" => "127.0.0.1", - "role" => "anon" - } - } - ] - - ids = - Enum.map(pg_change_params, fn %{id: id, params: params} -> - {UUID.string_to_binary!(id), :erlang.phash2(params)} - end) - - topic = "realtime:test" - serializer = Phoenix.Socket.V1.JSONSerializer - - subscription_metadata = {:subscriber_fastlane, self(), serializer, ids, topic, external_id, true} - metadata = [metadata: subscription_metadata] - :ok = PostgresCdc.subscribe(PostgresCdcRls, pg_change_params, external_id, metadata) + pg_change_params = pubsub_subscribe(external_id) + RealtimeWeb.Endpoint.subscribe(Realtime.Syn.PostgresCdc.syn_topic(tenant.external_id)) # First time it will return nil PostgresCdcRls.handle_connect(args) # Wait for it to start - Process.sleep(3000) + assert_receive %{event: "ready"}, 1000 + + on_exit(fn -> PostgresCdcRls.handle_stop(external_id, 10_000) end) {:ok, response} = PostgresCdcRls.handle_connect(args) # Now subscribe to the Postgres Changes - {:ok, _} = PostgresCdcRls.handle_after_connect(response, 
postgres_extension, pg_change_params) + {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params, external_id) - on_exit(fn -> PostgresCdcRls.handle_stop(external_id, 10_000) end) + RealtimeWeb.Endpoint.unsubscribe(Realtime.Syn.PostgresCdc.syn_topic(tenant.external_id)) %{tenant: tenant} end - @tag skip: "Flaky test. When logger handle_sasl_reports is enabled this test doesn't break" - test "Check supervisor crash and respawn", %{tenant: tenant} do + test "supervisor crash must not respawn", %{tenant: tenant} do + scope = Realtime.Syn.PostgresCdc.scope(tenant.external_id) + sup = Enum.reduce_while(1..30, nil, fn _, acc -> - :syn.lookup(Extensions.PostgresCdcRls, tenant.external_id) + scope + |> :syn.lookup(tenant.external_id) |> case do :undefined -> Process.sleep(500) @@ -107,27 +67,22 @@ defmodule Realtime.Extensions.CdcRlsTest do assert Process.alive?(sup) Process.monitor(sup) - RealtimeWeb.Endpoint.subscribe(PostgresCdcRls.syn_topic(tenant.external_id)) + RealtimeWeb.Endpoint.subscribe(Realtime.Syn.PostgresCdc.syn_topic(tenant.external_id)) Process.exit(sup, :kill) - assert_receive {:DOWN, _, :process, ^sup, _reason}, 5000 + scope_down = Atom.to_string(scope) <> "_down" - assert_receive %{event: "ready"}, 5000 - - {sup2, _} = :syn.lookup(Extensions.PostgresCdcRls, tenant.external_id) + assert_receive {:DOWN, _, :process, ^sup, _reason}, 5000 + assert_receive %{event: ^scope_down} + refute_receive %{event: "ready"}, 1000 - assert(sup != sup2) - assert Process.alive?(sup2) + :undefined = :syn.lookup(Realtime.Syn.PostgresCdc.scope(tenant.external_id), tenant.external_id) end test "Subscription manager updates oids", %{tenant: tenant} do {subscriber_manager_pid, conn} = Enum.reduce_while(1..25, nil, fn _, acc -> case PostgresCdcRls.get_manager_conn(tenant.external_id) do - nil -> - Process.sleep(200) - {:cont, acc} - {:error, :wait} -> Process.sleep(200) {:cont, acc} @@ -153,7 +108,10 @@ defmodule Realtime.Extensions.CdcRlsTest 
do test "Stop tenant supervisor", %{tenant: tenant} do sup = Enum.reduce_while(1..10, nil, fn _, acc -> - case :syn.lookup(Extensions.PostgresCdcRls, tenant.external_id) do + tenant.external_id + |> Realtime.Syn.PostgresCdc.scope() + |> :syn.lookup(tenant.external_id) + |> case do :undefined -> Process.sleep(500) {:cont, acc} @@ -169,6 +127,39 @@ defmodule Realtime.Extensions.CdcRlsTest do end end + describe "handle_after_connect/4" do + setup do + tenant = Containers.checkout_tenant(run_migrations: true) + %{tenant: tenant} + end + + test "subscription error rate limit", %{tenant: tenant} do + %Tenant{extensions: extensions, external_id: external_id} = tenant + postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) + + stub(Subscriptions, :create, fn _conn, _publication, _subscription_list, _manager, _caller -> + {:error, %DBConnection.ConnectionError{}} + end) + + # Now try to subscribe to the Postgres Changes + for _x <- 1..6 do + assert {:error, "Too many database timeouts"} = + PostgresCdcRls.handle_after_connect({:manager_pid, self()}, postgres_extension, %{}, external_id) + end + + rate = Realtime.Tenants.subscription_errors_per_second_rate(external_id, 4) + + assert {:ok, %RateCounter{id: {:channel, :subscription_errors, ^external_id}, sum: 6, limit: %{triggered: true}}} = + RateCounterHelper.tick!(rate) + + # It won't even be called now + reject(&Subscriptions.create/5) + + assert {:error, "Too many database timeouts"} = + PostgresCdcRls.handle_after_connect({:manager_pid, self()}, postgres_extension, %{}, external_id) + end + end + describe "Region rebalancing" do setup do tenant = Containers.checkout_tenant(run_migrations: true) @@ -208,36 +199,7 @@ defmodule Realtime.Extensions.CdcRlsTest do end describe "integration" do - setup do - tenant = Api.get_tenant_by_external_id("dev_tenant") - PostgresCdcRls.handle_stop(tenant.external_id, 10_000) - - {:ok, conn} = Database.connect(tenant, "realtime_test") - - 
Database.transaction(conn, fn db_conn -> - queries = [ - "drop table if exists public.test", - "drop publication if exists supabase_realtime_test", - "create sequence if not exists test_id_seq;", - """ - create table if not exists "public"."test" ( - "id" int4 not null default nextval('test_id_seq'::regclass), - "details" text, - primary key ("id")); - """, - "grant all on table public.test to anon;", - "grant all on table public.test to postgres;", - "grant all on table public.test to authenticated;", - "create publication supabase_realtime_test for all tables" - ] - - Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) - end) - - RateCounter.stop(tenant.external_id) - - %{tenant: tenant, conn: conn} - end + setup [:integration] test "subscribe inserts", %{tenant: tenant, conn: conn} do on_exit(fn -> PostgresCdcRls.handle_stop(tenant.external_id, 10_000) end) @@ -246,40 +208,26 @@ defmodule Realtime.Extensions.CdcRlsTest do postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) args = Map.put(postgres_extension, "id", external_id) - pg_change_params = [ - %{ - id: UUID.uuid1(), - params: %{"event" => "*", "schema" => "public"}, - channel_pid: self(), - claims: %{ - "exp" => System.system_time(:second) + 100_000, - "iat" => 0, - "ref" => "127.0.0.1", - "role" => "anon" - } - } - ] - - ids = - Enum.map(pg_change_params, fn %{id: id, params: params} -> - {UUID.string_to_binary!(id), :erlang.phash2(params)} - end) - - topic = "realtime:test" - serializer = Phoenix.Socket.V1.JSONSerializer - - subscription_metadata = {:subscriber_fastlane, self(), serializer, ids, topic, external_id, true} - metadata = [metadata: subscription_metadata] - :ok = PostgresCdc.subscribe(PostgresCdcRls, pg_change_params, external_id, metadata) + pg_change_params = pubsub_subscribe(external_id) # First time it will return nil PostgresCdcRls.handle_connect(args) # Wait for it to start - Process.sleep(3000) + assert_receive %{event: "ready"}, 3000 {:ok, response} = 
PostgresCdcRls.handle_connect(args) + assert_receive { + :telemetry, + [:realtime, :rpc], + %{latency: _}, + %{ + mechanism: :gen_rpc, + success: true + } + } + # Now subscribe to the Postgres Changes - {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params) + {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params, external_id) assert %Postgrex.Result{rows: [[1]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) # Insert a record @@ -287,11 +235,6 @@ defmodule Realtime.Extensions.CdcRlsTest do assert_receive {:socket_push, :text, data}, 5000 - message = - data - |> IO.iodata_to_binary() - |> Jason.decode!() - assert %{ "event" => "postgres_changes", "payload" => %{ @@ -308,71 +251,105 @@ defmodule Realtime.Extensions.CdcRlsTest do }, "ref" => nil, "topic" => "realtime:test" - } = message + } = Jason.decode!(data) + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + + assert {:ok, %RateCounter{id: {:channel, :db_events, "dev_tenant"}, bucket: bucket}} = + RateCounterHelper.tick!(rate) + + assert Enum.sum(bucket) == 1 + + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: _}, + %{tenant: "dev_tenant", message_type: :postgres_changes} + } + end + + test "db events rate limit works", %{tenant: tenant, conn: conn} do + on_exit(fn -> PostgresCdcRls.handle_stop(tenant.external_id, 10_000) end) - # Wait for RateCounter to update - Process.sleep(2000) + %Tenant{extensions: extensions, external_id: external_id} = tenant + postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) + args = Map.put(postgres_extension, "id", external_id) + + pg_change_params = pubsub_subscribe(external_id) + + # First time it will return nil + PostgresCdcRls.handle_connect(args) + # Wait for it to start + assert_receive %{event: "ready"}, 1000 + {:ok, response} = PostgresCdcRls.handle_connect(args) + + # Now subscribe to the 
Postgres Changes + {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params, external_id) + assert %Postgrex.Result{rows: [[1]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) rate = Realtime.Tenants.db_events_per_second_rate(tenant) - assert {:ok, %RateCounter{id: {:channel, :db_events, "dev_tenant"}, bucket: bucket}} = RateCounter.get(rate) - assert 1 in bucket + log = + capture_log(fn -> + # increment artifically the counter to reach the limit + tenant.external_id + |> Realtime.Tenants.db_events_per_second_key() + |> Realtime.GenCounter.add(100_000_000) + + RateCounterHelper.tick!(rate) + end) + + assert log =~ "MessagePerSecondRateLimitReached: Too many postgres changes messages per second" + + # Insert a record + %{rows: [[_id]]} = Postgrex.query!(conn, "insert into test (details) values ('test') returning id", []) + + refute_receive {:socket_push, :text, _}, 5000 + + assert {:ok, %RateCounter{id: {:channel, :db_events, "dev_tenant"}, bucket: bucket, limit: %{triggered: true}}} = + RateCounterHelper.tick!(rate) + + # Nothing has changed + assert Enum.sum(bucket) == 100_000_000 end + end - @aux_mod (quote do - defmodule Subscriber do - # Start CDC remotely - def subscribe(tenant) do - %Tenant{extensions: extensions, external_id: external_id} = tenant - postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) - args = Map.put(postgres_extension, "id", external_id) - - # Boot it - PostgresCdcRls.start(args) - # Wait for it to start - Process.sleep(3000) - {:ok, manager, conn} = PostgresCdcRls.get_manager_conn(external_id) - {:ok, {manager, conn}} - end + @aux_mod (quote do + defmodule Subscriber do + # Start CDC remotely + def subscribe(tenant) do + %Tenant{extensions: extensions, external_id: external_id} = tenant + postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) + args = Map.put(postgres_extension, "id", external_id) + + 
RealtimeWeb.Endpoint.subscribe(Realtime.Syn.PostgresCdc.syn_topic(tenant.external_id)) + # First time it will return nil + PostgresCdcRls.start(args) + # Wait for it to start + assert_receive %{event: "ready"}, 3000 + {:ok, manager, conn} = PostgresCdcRls.get_manager_conn(external_id) + {:ok, {manager, conn}} end - end) + end + end) + describe "distributed integration" do + setup [:integration] - test "subscribe inserts distributed mode", %{tenant: tenant, conn: conn} do + setup(%{tenant: tenant}) do {:ok, node} = Clustered.start(@aux_mod) {:ok, response} = :erpc.call(node, Subscriber, :subscribe, [tenant]) + %{node: node, response: response} + end + + test "subscribe inserts distributed mode", %{tenant: tenant, conn: conn, node: node, response: response} do %Tenant{extensions: extensions, external_id: external_id} = tenant postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) - pg_change_params = [ - %{ - id: UUID.uuid1(), - params: %{"event" => "*", "schema" => "public"}, - channel_pid: self(), - claims: %{ - "exp" => System.system_time(:second) + 100_000, - "iat" => 0, - "ref" => "127.0.0.1", - "role" => "anon" - } - } - ] - - ids = - Enum.map(pg_change_params, fn %{id: id, params: params} -> - {UUID.string_to_binary!(id), :erlang.phash2(params)} - end) - - # Subscribe to the topic as a websocket client - topic = "realtime:test" - serializer = Phoenix.Socket.V1.JSONSerializer - - subscription_metadata = {:subscriber_fastlane, self(), serializer, ids, topic, external_id, true} - metadata = [metadata: subscription_metadata] - :ok = PostgresCdc.subscribe(PostgresCdcRls, pg_change_params, external_id, metadata) + pg_change_params = pubsub_subscribe(external_id) # Now subscribe to the Postgres Changes - {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params) + {:ok, _} = PostgresCdcRls.handle_after_connect(response, postgres_extension, pg_change_params, external_id) assert %Postgrex.Result{rows: [[1]]} 
= Postgrex.query!(conn, "select count(*) from realtime.subscription", []) # Insert a record @@ -380,11 +357,6 @@ defmodule Realtime.Extensions.CdcRlsTest do assert_receive {:socket_push, :text, data}, 5000 - message = - data - |> IO.iodata_to_binary() - |> Jason.decode!() - assert %{ "event" => "postgres_changes", "payload" => %{ @@ -401,17 +373,120 @@ defmodule Realtime.Extensions.CdcRlsTest do }, "ref" => nil, "topic" => "realtime:test" - } = message + } = Jason.decode!(data) - # Wait for RateCounter to update - Process.sleep(2000) + assert_receive { + :telemetry, + [:realtime, :rpc], + %{latency: _}, + %{ + mechanism: :gen_rpc, + origin_node: _, + success: true, + target_node: ^node + } + } + end - rate = Realtime.Tenants.db_events_per_second_rate(tenant) + test "subscription error rate limit", %{tenant: tenant, node: node} do + %Tenant{extensions: extensions, external_id: external_id} = tenant + postgres_extension = PostgresCdc.filter_settings("postgres_cdc_rls", extensions) - assert {:ok, %RateCounter{id: {:channel, :db_events, "dev_tenant"}, bucket: bucket}} = RateCounter.get(rate) - assert 1 in bucket + pg_change_params = pubsub_subscribe(external_id) - :erpc.call(node, PostgresCdcRls, :handle_stop, [tenant.external_id, 10_000]) + # Grab a process that is not alive to cause subscriptions to error out + pid = :erpc.call(node, :erlang, :self, []) + + # Now subscribe to the Postgres Changes multiple times to reach the rate limit + for _ <- 1..6 do + assert {:error, "Too many database timeouts"} = + PostgresCdcRls.handle_after_connect({pid, pid}, postgres_extension, pg_change_params, external_id) + end + + rate = Realtime.Tenants.subscription_errors_per_second_rate(external_id, 4) + + assert {:ok, %RateCounter{id: {:channel, :subscription_errors, ^external_id}, sum: 6, limit: %{triggered: true}}} = + RateCounterHelper.tick!(rate) + + # It won't even be called now + reject(&Realtime.GenRpc.call/5) + + assert {:error, "Too many database timeouts"} = + 
PostgresCdcRls.handle_after_connect({pid, pid}, postgres_extension, pg_change_params, external_id) end end + + defp integration(_) do + tenant = Api.get_tenant_by_external_id("dev_tenant") + PostgresCdcRls.handle_stop(tenant.external_id, 10_000) + + {:ok, conn} = Database.connect(tenant, "realtime_test") + + Database.transaction(conn, fn db_conn -> + queries = [ + "drop table if exists public.test", + "drop publication if exists supabase_realtime_test", + "create sequence if not exists test_id_seq;", + """ + create table if not exists "public"."test" ( + "id" int4 not null default nextval('test_id_seq'::regclass), + "details" text, + primary key ("id")); + """, + "grant all on table public.test to anon;", + "grant all on table public.test to postgres;", + "grant all on table public.test to authenticated;", + "create publication supabase_realtime_test for all tables" + ] + + Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) + end) + + RateCounterHelper.stop(tenant.external_id) + on_exit(fn -> RateCounterHelper.stop(tenant.external_id) end) + + on_exit(fn -> :telemetry.detach(__MODULE__) end) + + :telemetry.attach_many( + __MODULE__, + [[:realtime, :tenants, :payload, :size], [:realtime, :rpc]], + &__MODULE__.handle_telemetry/4, + pid: self() + ) + + RealtimeWeb.Endpoint.subscribe(Realtime.Syn.PostgresCdc.syn_topic(tenant.external_id)) + + %{tenant: tenant, conn: conn} + end + + defp pubsub_subscribe(external_id) do + pg_change_params = [ + %{ + id: UUID.uuid1(), + params: %{"event" => "*", "schema" => "public"}, + channel_pid: self(), + claims: %{ + "exp" => System.system_time(:second) + 100_000, + "iat" => 0, + "ref" => "127.0.0.1", + "role" => "anon" + } + } + ] + + topic = "realtime:test" + serializer = Phoenix.Socket.V1.JSONSerializer + + ids = + Enum.map(pg_change_params, fn %{id: id, params: params} -> + {UUID.string_to_binary!(id), :erlang.phash2(params)} + end) + + subscription_metadata = {:subscriber_fastlane, self(), serializer, ids, topic, true} + 
metadata = [metadata: subscription_metadata] + :ok = PostgresCdc.subscribe(PostgresCdcRls, pg_change_params, external_id, metadata) + pg_change_params + end + + def handle_telemetry(event, measures, metadata, pid: pid), do: send(pid, {:telemetry, event, measures, metadata}) end diff --git a/test/realtime/extensions/cdc_rls/replication_poller_test.exs b/test/realtime/extensions/cdc_rls/replication_poller_test.exs index 97d69af62..0fba63a66 100644 --- a/test/realtime/extensions/cdc_rls/replication_poller_test.exs +++ b/test/realtime/extensions/cdc_rls/replication_poller_test.exs @@ -1,8 +1,12 @@ -defmodule ReplicationPollerTest do - use ExUnit.Case, async: false +defmodule Realtime.Extensions.PostgresCdcRls.ReplicationPollerTest do + # Tweaking application env + use Realtime.DataCase, async: false + use Mimic + + alias Extensions.PostgresCdcRls.MessageDispatcher alias Extensions.PostgresCdcRls.ReplicationPoller, as: Poller - import Poller, only: [generate_record: 1] + alias Extensions.PostgresCdcRls.Replications alias Realtime.Adapters.Changes.{ DeletedRecord, @@ -10,6 +14,284 @@ defmodule ReplicationPollerTest do UpdatedRecord } + alias Realtime.RateCounter + + alias RealtimeWeb.TenantBroadcaster + + import Poller, only: [generate_record: 1] + + setup :set_mimic_global + + @change_json ~s({"table":"test","type":"INSERT","record":{"id": 34, "details": "test"},"columns":[{"name": "id", "type": "int4"}, {"name": "details", "type": "text"}],"errors":null,"schema":"public","commit_timestamp":"2025-10-13T07:50:28.066Z"}) + + describe "poll" do + setup do + :telemetry.attach( + __MODULE__, + [:realtime, :replication, :poller, :query, :stop], + &__MODULE__.handle_telemetry/4, + pid: self() + ) + + on_exit(fn -> :telemetry.detach(__MODULE__) end) + + tenant = Containers.checkout_tenant(run_migrations: true) + + {:ok, tenant} = Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{"max_events_per_second" => 123}) + + subscribers_pids_table = :ets.new(__MODULE__, 
[:public, :bag]) + subscribers_nodes_table = :ets.new(__MODULE__, [:public, :set]) + + args = + hd(tenant.extensions).settings + |> Map.put("id", tenant.external_id) + |> Map.put("subscribers_pids_table", subscribers_pids_table) + |> Map.put("subscribers_nodes_table", subscribers_nodes_table) + + # unless specified it will return empty results + empty_results = {:ok, %Postgrex.Result{rows: [], num_rows: 0}} + stub(Replications, :list_changes, fn _, _, _, _, _ -> empty_results end) + + %{args: args, tenant: tenant} + end + + test "handles no new changes", %{args: args, tenant: tenant} do + tenant_id = args["id"] + reject(&TenantBroadcaster.pubsub_direct_broadcast/6) + reject(&TenantBroadcaster.pubsub_broadcast/5) + start_link_supervised!({Poller, args}) + + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + + assert {:ok, + %RateCounter{ + sum: sum, + limit: %{ + value: 123, + measurement: :avg, + triggered: false + } + }} = RateCounterHelper.tick!(rate) + + assert sum == 0 + end + + test "handles new changes with missing ets table", %{args: args, tenant: tenant} do + tenant_id = args["id"] + + :ets.delete(args["subscribers_nodes_table"]) + + results = + build_result([ + <<71, 36, 83, 212, 168, 9, 17, 240, 165, 186, 118, 202, 193, 157, 232, 187>>, + <<251, 188, 190, 118, 168, 119, 17, 240, 188, 87, 118, 202, 193, 157, 232, 187>> + ]) + + expect(Replications, :list_changes, fn _, _, _, _, _ -> results end) + reject(&TenantBroadcaster.pubsub_direct_broadcast/6) + + # Broadcast to the whole cluster due to missing node information + expect(TenantBroadcaster, :pubsub_broadcast, fn ^tenant_id, + "realtime:postgres:" <> ^tenant_id, + {"INSERT", change_json, _sub_ids}, + MessageDispatcher, + :postgres_changes -> + assert Jason.decode!(change_json) == Jason.decode!(@change_json) + :ok + end) + + start_link_supervised!({Poller, 
args}) + + # First poll with changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + # Second poll without changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + assert {:ok, %RateCounter{sum: sum}} = RateCounterHelper.tick!(rate) + assert sum == 2 + end + + test "handles new changes with no subscription nodes", %{args: args, tenant: tenant} do + tenant_id = args["id"] + + results = + build_result([ + <<71, 36, 83, 212, 168, 9, 17, 240, 165, 186, 118, 202, 193, 157, 232, 187>>, + <<251, 188, 190, 118, 168, 119, 17, 240, 188, 87, 118, 202, 193, 157, 232, 187>> + ]) + + expect(Replications, :list_changes, fn _, _, _, _, _ -> results end) + reject(&TenantBroadcaster.pubsub_direct_broadcast/6) + + # Broadcast to the whole cluster due to missing node information + expect(TenantBroadcaster, :pubsub_broadcast, fn ^tenant_id, + "realtime:postgres:" <> ^tenant_id, + {"INSERT", change_json, _sub_ids}, + MessageDispatcher, + :postgres_changes -> + assert Jason.decode!(change_json) == Jason.decode!(@change_json) + :ok + end) + + start_link_supervised!({Poller, args}) + + # First poll with changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + # Second poll without changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + assert {:ok, %RateCounter{sum: sum}} = RateCounterHelper.tick!(rate) + assert sum == 2 + end + + test "handles new changes with missing subscription nodes", %{args: args, tenant: tenant} do + tenant_id = args["id"] + + results = + build_result([ + sub1 = <<71, 36, 83, 
212, 168, 9, 17, 240, 165, 186, 118, 202, 193, 157, 232, 187>>, + <<251, 188, 190, 118, 168, 119, 17, 240, 188, 87, 118, 202, 193, 157, 232, 187>> + ]) + + # Only one subscription has node information + :ets.insert(args["subscribers_nodes_table"], {sub1, node()}) + + expect(Replications, :list_changes, fn _, _, _, _, _ -> results end) + reject(&TenantBroadcaster.pubsub_direct_broadcast/6) + + # Broadcast to the whole cluster due to missing node information + expect(TenantBroadcaster, :pubsub_broadcast, fn ^tenant_id, + "realtime:postgres:" <> ^tenant_id, + {"INSERT", change_json, _sub_ids}, + MessageDispatcher, + :postgres_changes -> + assert Jason.decode!(change_json) == Jason.decode!(@change_json) + :ok + end) + + start_link_supervised!({Poller, args}) + + # First poll with changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + # Second poll without changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + assert {:ok, %RateCounter{sum: sum}} = RateCounterHelper.tick!(rate) + assert sum == 2 + end + + test "handles new changes with subscription nodes information", %{args: args, tenant: tenant} do + tenant_id = args["id"] + + results = + build_result([ + sub1 = <<71, 36, 83, 212, 168, 9, 17, 240, 165, 186, 118, 202, 193, 157, 232, 187>>, + sub2 = <<251, 188, 190, 118, 168, 119, 17, 240, 188, 87, 118, 202, 193, 157, 232, 187>>, + sub3 = <<49, 59, 209, 112, 173, 77, 17, 240, 191, 41, 118, 202, 193, 157, 232, 187>> + ]) + + # All subscriptions have node information + :ets.insert(args["subscribers_nodes_table"], {sub1, node()}) + :ets.insert(args["subscribers_nodes_table"], {sub2, :"someothernode@127.0.0.1"}) + :ets.insert(args["subscribers_nodes_table"], {sub3, node()}) + + expect(Replications, :list_changes, fn _, _, _, 
_, _ -> results end) + reject(&TenantBroadcaster.pubsub_broadcast/5) + + topic = "realtime:postgres:" <> tenant_id + + # # Broadcast to the exact nodes only + expect(TenantBroadcaster, :pubsub_direct_broadcast, 2, fn + _node, ^tenant_id, ^topic, {"INSERT", change_json, _sub_ids}, MessageDispatcher, :postgres_changes -> + assert Jason.decode!(change_json) == Jason.decode!(@change_json) + :ok + end) + + start_link_supervised!({Poller, args}) + + # First poll with changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + # Second poll without changes + assert_receive { + :telemetry, + [:realtime, :replication, :poller, :query, :stop], + %{duration: _}, + %{tenant: ^tenant_id} + }, + 500 + + calls = calls(TenantBroadcaster, :pubsub_direct_broadcast, 6) + + assert Enum.count(calls) == 2 + + node_subs = Enum.map(calls, fn [node, _, _, {"INSERT", _change_json, sub_ids}, _, _] -> {node, sub_ids} end) + + assert {node(), MapSet.new([sub1, sub3])} in node_subs + assert {:"someothernode@127.0.0.1", MapSet.new([sub2])} in node_subs + + rate = Realtime.Tenants.db_events_per_second_rate(tenant) + assert {:ok, %RateCounter{sum: sum}} = RateCounterHelper.tick!(rate) + assert sum == 3 + end + end + @columns [ %{"name" => "id", "type" => "int8"}, %{"name" => "details", "type" => "text"}, @@ -19,272 +301,277 @@ defmodule ReplicationPollerTest do @ts "2021-11-05T17:20:51.52406+00:00" @subscription_id "417e76fd-9bc5-4b3e-bd5d-a031389c4a6b" + @subscription_ids MapSet.new(["417e76fd-9bc5-4b3e-bd5d-a031389c4a6b"]) + + @old_record %{"id" => 12} + @record %{"details" => "test", "id" => 12, "user_id" => 1} describe "generate_record/1" do test "INSERT" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "record" => %{"details" => "test", "id" => 12, "user_id" => 1}, - "schema" => "public", - "table" => "todos", - "type" => "INSERT" - }}, - {"is_rls_enabled", false}, 
+ wal_record = [ + {"type", "INSERT"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", nil}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", []} ] - expected = %NewRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "INSERT", - subscription_ids: MapSet.new([@subscription_id]), - record: %{"details" => "test", "id" => 12, "user_id" => 1}, - errors: nil - } - - assert expected == generate_record(record) + assert %NewRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "INSERT", + subscription_ids: @subscription_ids, + record: record, + errors: nil + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "UPDATE" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "old_record" => %{"id" => 12}, - "record" => %{"details" => "test1", "id" => 12, "user_id" => 1}, - "schema" => "public", - "table" => "todos", - "type" => "UPDATE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "UPDATE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", []} ] - expected = %UpdatedRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "UPDATE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{"id" => 12}, - record: %{"details" => "test1", "id" => 12, "user_id" => 1}, - errors: nil - } - - assert expected == generate_record(record) + assert %UpdatedRecord{ + columns: columns, + commit_timestamp: 
@ts, + schema: "public", + table: "todos", + type: "UPDATE", + subscription_ids: @subscription_ids, + record: record, + old_record: old_record, + errors: nil + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "DELETE" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "old_record" => %{"id" => 15}, - "schema" => "public", - "table" => "todos", - "type" => "DELETE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "DELETE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", nil}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", []} ] - expected = %DeletedRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "DELETE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{"id" => 15}, - errors: nil - } - - assert expected == generate_record(record) + assert %DeletedRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "DELETE", + subscription_ids: @subscription_ids, + old_record: old_record, + errors: nil + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "INSERT, large payload error present" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "record" => %{"details" => "test", "id" => 12, "user_id" => 1}, - "schema" => "public", - "table" => "todos", - "type" => "INSERT" - }}, - {"is_rls_enabled", false}, + wal_record = [ + 
{"type", "INSERT"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", nil}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error 413: Payload Too Large"]} ] - expected = %NewRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "INSERT", - subscription_ids: MapSet.new([@subscription_id]), - record: %{"details" => "test", "id" => 12, "user_id" => 1}, - errors: ["Error 413: Payload Too Large"] - } - - assert expected == generate_record(record) + assert %NewRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "INSERT", + subscription_ids: @subscription_ids, + record: record, + errors: ["Error 413: Payload Too Large"] + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "INSERT, other errors present" do - record = [ - {"wal", - %{ - "schema" => "public", - "table" => "todos", - "type" => "INSERT" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "INSERT"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", nil}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error..."]} ] - expected = %NewRecord{ - columns: [], - commit_timestamp: nil, - schema: "public", - table: "todos", - type: "INSERT", - subscription_ids: MapSet.new([@subscription_id]), - record: %{}, - errors: ["Error..."] - } - - assert expected == generate_record(record) + assert %NewRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "INSERT", + subscription_ids: @subscription_ids, + record: record, + errors: ["Error..."] + } = 
generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "UPDATE, large payload error present" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "old_record" => %{"details" => "prev test", "id" => 12, "user_id" => 1}, - "record" => %{"details" => "test", "id" => 12, "user_id" => 1}, - "schema" => "public", - "table" => "todos", - "type" => "UPDATE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "UPDATE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error 413: Payload Too Large"]} ] - expected = %UpdatedRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "UPDATE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{"details" => "prev test", "id" => 12, "user_id" => 1}, - record: %{"details" => "test", "id" => 12, "user_id" => 1}, - errors: ["Error 413: Payload Too Large"] - } - - assert expected == generate_record(record) + assert %UpdatedRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "UPDATE", + subscription_ids: @subscription_ids, + record: record, + old_record: old_record, + errors: ["Error 413: Payload Too Large"] + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "UPDATE, other errors present" do - record = [ - {"wal", - %{ - "schema" => "public", - "table" => "todos", - "type" => 
"UPDATE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "UPDATE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", Jason.encode!(@record)}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error..."]} ] - expected = %UpdatedRecord{ - columns: [], - commit_timestamp: nil, - schema: "public", - table: "todos", - type: "UPDATE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{}, - record: %{}, - errors: ["Error..."] - } - - assert expected == generate_record(record) + assert %UpdatedRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "UPDATE", + subscription_ids: @subscription_ids, + record: record, + old_record: old_record, + errors: ["Error..."] + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert record |> Jason.encode!() |> Jason.decode!() == @record + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "DELETE, large payload error present" do - record = [ - {"wal", - %{ - "columns" => @columns, - "commit_timestamp" => @ts, - "old_record" => %{"details" => "test", "id" => 12, "user_id" => 1}, - "schema" => "public", - "table" => "todos", - "type" => "DELETE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "DELETE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", nil}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error 413: Payload Too Large"]} ] - expected = %DeletedRecord{ - columns: @columns, - commit_timestamp: @ts, - schema: "public", - table: "todos", - type: "DELETE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{"details" => "test", "id" => 
12, "user_id" => 1}, - errors: ["Error 413: Payload Too Large"] - } - - assert expected == generate_record(record) + assert %DeletedRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "DELETE", + subscription_ids: @subscription_ids, + old_record: old_record, + errors: ["Error 413: Payload Too Large"] + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end test "DELETE, other errors present" do - record = [ - {"wal", - %{ - "schema" => "public", - "table" => "todos", - "type" => "DELETE" - }}, - {"is_rls_enabled", false}, + wal_record = [ + {"type", "DELETE"}, + {"schema", "public"}, + {"table", "todos"}, + {"columns", Jason.encode!(@columns)}, + {"record", nil}, + {"old_record", Jason.encode!(@old_record)}, + {"commit_timestamp", @ts}, {"subscription_ids", [@subscription_id]}, {"errors", ["Error..."]} ] - expected = %DeletedRecord{ - columns: [], - commit_timestamp: nil, - schema: "public", - table: "todos", - type: "DELETE", - subscription_ids: MapSet.new([@subscription_id]), - old_record: %{}, - errors: ["Error..."] - } - - assert expected == generate_record(record) + assert %DeletedRecord{ + columns: columns, + commit_timestamp: @ts, + schema: "public", + table: "todos", + type: "DELETE", + subscription_ids: @subscription_ids, + old_record: old_record, + errors: ["Error..."] + } = generate_record(wal_record) + + # Encode then decode to get rid of the fragment + assert old_record |> Jason.encode!() |> Jason.decode!() == @old_record + assert columns |> Jason.encode!() |> Jason.decode!() == @columns end end @@ -305,4 +592,40 @@ defmodule ReplicationPollerTest do assert Poller.slot_name_suffix() == "" end end + + def handle_telemetry(event, measures, metadata, pid: pid), do: send(pid, {:telemetry, event, measures, metadata}) + + defp 
build_result(subscription_ids) do + {:ok, + %Postgrex.Result{ + command: :select, + columns: [ + "type", + "schema", + "table", + "columns", + "record", + "old_record", + "commit_timestamp", + "subscription_ids", + "errors" + ], + rows: [ + [ + "INSERT", + "public", + "test", + "[{\"name\": \"id\", \"type\": \"int4\"}, {\"name\": \"details\", \"type\": \"text\"}]", + "{\"id\": 34, \"details\": \"test\"}", + nil, + "2025-10-13T07:50:28.066Z", + subscription_ids, + [] + ] + ], + num_rows: 1, + connection_id: 123, + messages: [] + }} + end end diff --git a/test/realtime/extensions/cdc_rls/subscription_manager_test.exs b/test/realtime/extensions/cdc_rls/subscription_manager_test.exs new file mode 100644 index 000000000..3fbde34b5 --- /dev/null +++ b/test/realtime/extensions/cdc_rls/subscription_manager_test.exs @@ -0,0 +1,160 @@ +defmodule Realtime.Extensions.CdcRls.SubscriptionManagerTest do + use Realtime.DataCase, async: true + + alias Extensions.PostgresCdcRls + alias Extensions.PostgresCdcRls.SubscriptionManager + alias Extensions.PostgresCdcRls.Subscriptions + + setup do + tenant = Containers.checkout_tenant(run_migrations: true) + + subscribers_pids_table = :ets.new(__MODULE__, [:public, :bag]) + subscribers_nodes_table = :ets.new(__MODULE__, [:public, :set]) + + args = + hd(tenant.extensions).settings + |> Map.put("id", tenant.external_id) + |> Map.put("subscribers_pids_table", subscribers_pids_table) + |> Map.put("subscribers_nodes_table", subscribers_nodes_table) + + # register this process with syn as if this was the WorkersSupervisor + + scope = Realtime.Syn.PostgresCdc.scope(tenant.external_id) + :syn.register(scope, tenant.external_id, self(), %{region: "us-east-1", manager: nil, subs_pool: nil}) + + {:ok, pid} = SubscriptionManager.start_link(Map.put(args, "id", tenant.external_id)) + # This serves so that we know that handle_continue has finished + :sys.get_state(pid) + %{args: args, pid: pid} + end + + describe "subscription" do + test "subscription", 
%{pid: pid, args: args} do + {:ok, ^pid, conn} = PostgresCdcRls.get_manager_conn(args["id"]) + {uuid, bin_uuid, pg_change_params} = pg_change_params() + + subscriber = self() + + assert {:ok, [%Postgrex.Result{command: :insert, columns: ["id"], rows: [[1]], num_rows: 1}]} = + Subscriptions.create(conn, args["publication"], [pg_change_params], pid, subscriber) + + # Wait for subscription manager to process the :subscribed message + :sys.get_state(pid) + + node = node() + + assert [{^subscriber, ^uuid, _ref, ^node}] = :ets.tab2list(args["subscribers_pids_table"]) + + assert :ets.tab2list(args["subscribers_nodes_table"]) == [{bin_uuid, node}] + end + + test "subscriber died", %{pid: pid, args: args} do + {:ok, ^pid, conn} = PostgresCdcRls.get_manager_conn(args["id"]) + self = self() + + subscriber = + spawn(fn -> + receive do + :stop -> :ok + end + end) + + {uuid1, bin_uuid1, pg_change_params1} = pg_change_params() + {uuid2, bin_uuid2, pg_change_params2} = pg_change_params() + {uuid3, bin_uuid3, pg_change_params3} = pg_change_params() + + assert {:ok, _} = + Subscriptions.create(conn, args["publication"], [pg_change_params1, pg_change_params2], pid, subscriber) + + assert {:ok, _} = Subscriptions.create(conn, args["publication"], [pg_change_params3], pid, self()) + + # Wait for subscription manager to process the :subscribed message + :sys.get_state(pid) + + node = node() + + assert :ets.info(args["subscribers_pids_table"], :size) == 3 + + assert [{^subscriber, ^uuid1, _, ^node}, {^subscriber, ^uuid2, _, ^node}] = + :ets.lookup(args["subscribers_pids_table"], subscriber) + + assert [{^self, ^uuid3, _ref, ^node}] = :ets.lookup(args["subscribers_pids_table"], self) + + assert :ets.info(args["subscribers_nodes_table"], :size) == 3 + assert [{^bin_uuid1, ^node}] = :ets.lookup(args["subscribers_nodes_table"], bin_uuid1) + assert [{^bin_uuid2, ^node}] = :ets.lookup(args["subscribers_nodes_table"], bin_uuid2) + assert [{^bin_uuid3, ^node}] = 
:ets.lookup(args["subscribers_nodes_table"], bin_uuid3) + + send(subscriber, :stop) + # Wait for subscription manager to receive the :DOWN message + Process.sleep(200) + + # Only the subscription we have not stopped should remain + + assert [{^self, ^uuid3, _ref, ^node}] = :ets.tab2list(args["subscribers_pids_table"]) + assert [{^bin_uuid3, ^node}] = :ets.tab2list(args["subscribers_nodes_table"]) + end + end + + describe "subscription deletion" do + test "subscription is deleted when process goes away", %{pid: pid, args: args} do + {:ok, ^pid, conn} = PostgresCdcRls.get_manager_conn(args["id"]) + {_uuid, _bin_uuid, pg_change_params} = pg_change_params() + + subscriber = + spawn(fn -> + receive do + :stop -> :ok + end + end) + + assert {:ok, [%Postgrex.Result{command: :insert, columns: ["id"], rows: [[1]], num_rows: 1}]} = + Subscriptions.create(conn, args["publication"], [pg_change_params], pid, subscriber) + + # Wait for subscription manager to process the :subscribed message + :sys.get_state(pid) + + assert :ets.info(args["subscribers_pids_table"], :size) == 1 + assert :ets.info(args["subscribers_nodes_table"], :size) == 1 + + assert %Postgrex.Result{rows: [[1]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + + send(subscriber, :stop) + # Wait for subscription manager to receive the :DOWN message + Process.sleep(200) + + assert :ets.info(args["subscribers_pids_table"], :size) == 0 + assert :ets.info(args["subscribers_nodes_table"], :size) == 0 + + # Force check delete queue on manager + send(pid, :check_delete_queue) + Process.sleep(200) + end + end + + describe "check no users" do + test "exit is sent to manager", %{pid: pid} do + :sys.replace_state(pid, fn state -> %{state | no_users_ts: 0} end) + + send(pid, :check_no_users) + + assert_receive {:system, {^pid, _}, {:terminate, :shutdown}} + end + end + + defp pg_change_params do + uuid = UUID.uuid1() + + pg_change_params = %{ + id: uuid, + subscription_params: {"public", "*", []}, 
+ claims: %{ + "exp" => System.system_time(:second) + 100_000, + "iat" => 0, + "role" => "anon" + } + } + + {uuid, UUID.string_to_binary!(uuid), pg_change_params} + end +end diff --git a/test/realtime/extensions/cdc_rls/subscriptions_checker_distributed_test.exs b/test/realtime/extensions/cdc_rls/subscriptions_checker_distributed_test.exs new file mode 100644 index 000000000..3b459e6c1 --- /dev/null +++ b/test/realtime/extensions/cdc_rls/subscriptions_checker_distributed_test.exs @@ -0,0 +1,66 @@ +defmodule Realtime.Extensions.CdcRls.SubscriptionsCheckerDistributedTest do + # Usage of Clustered + use ExUnit.Case, async: false + import ExUnit.CaptureLog + + alias Extensions.PostgresCdcRls.SubscriptionsChecker, as: Checker + + setup do + {:ok, peer, remote_node} = Clustered.start_disconnected() + true = Node.connect(remote_node) + {:ok, peer: peer, remote_node: remote_node} + end + + describe "not_alive_pids_dist/1" do + test "returns empty list for all alive PIDs", %{remote_node: remote_node} do + assert Checker.not_alive_pids_dist(%{}) == [] + + pid1 = spawn(fn -> Process.sleep(5000) end) + pid2 = spawn(fn -> Process.sleep(5000) end) + pid3 = spawn(fn -> Process.sleep(5000) end) + pid4 = Node.spawn(remote_node, Process, :sleep, [5000]) + + assert Checker.not_alive_pids_dist(%{node() => MapSet.new([pid1, pid2, pid3]), remote_node => MapSet.new([pid4])}) == + [] + end + + test "returns list of dead PIDs", %{remote_node: remote_node} do + pid1 = spawn(fn -> Process.sleep(5000) end) + pid2 = spawn(fn -> Process.sleep(5000) end) + pid3 = spawn(fn -> Process.sleep(5000) end) + pid4 = Node.spawn(remote_node, Process, :sleep, [5000]) + pid5 = Node.spawn(remote_node, Process, :sleep, [5000]) + + Process.exit(pid2, :kill) + Process.exit(pid5, :kill) + + assert Checker.not_alive_pids_dist(%{ + node() => MapSet.new([pid1, pid2, pid3]), + remote_node => MapSet.new([pid4, pid5]) + }) == [pid2, pid5] + end + + test "handles rpc error", %{remote_node: remote_node, peer: peer} do + 
pid1 = spawn(fn -> Process.sleep(5000) end) + pid2 = spawn(fn -> Process.sleep(5000) end) + pid3 = spawn(fn -> Process.sleep(5000) end) + pid4 = Node.spawn(remote_node, Process, :sleep, [5000]) + pid5 = Node.spawn(remote_node, Process, :sleep, [5000]) + + Process.exit(pid2, :kill) + + # Stop the other node + :peer.stop(peer) + + log = + capture_log(fn -> + assert Checker.not_alive_pids_dist(%{ + node() => MapSet.new([pid1, pid2, pid3]), + remote_node => MapSet.new([pid4, pid5]) + }) == [pid2] + end) + + assert log =~ "UnableToCheckProcessesOnRemoteNode" + end + end +end diff --git a/test/realtime/extensions/cdc_rls/subscriptions_checker_test.exs b/test/realtime/extensions/cdc_rls/subscriptions_checker_test.exs index bfbb4bd7a..db39678ac 100644 --- a/test/realtime/extensions/cdc_rls/subscriptions_checker_test.exs +++ b/test/realtime/extensions/cdc_rls/subscriptions_checker_test.exs @@ -1,9 +1,10 @@ -defmodule SubscriptionsCheckerTest do +defmodule Realtime.Extensions.PostgresCdcRl.SubscriptionsCheckerTest do use ExUnit.Case, async: true alias Extensions.PostgresCdcRls.SubscriptionsChecker, as: Checker + import UUID, only: [uuid1: 0, string_to_binary!: 1] test "subscribers_by_node/1" do - tid = :ets.new(:table, [:public, :bag]) + subscribers_pids_table = :ets.new(:table, [:public, :bag]) test_data = [ {:pid1, "id1", :ref, :node1}, @@ -11,9 +12,9 @@ defmodule SubscriptionsCheckerTest do {:pid2, "id2", :ref, :node2} ] - :ets.insert(tid, test_data) + :ets.insert(subscribers_pids_table, test_data) - assert Checker.subscribers_by_node(tid) == %{ + assert Checker.subscribers_by_node(subscribers_pids_table) == %{ node1: MapSet.new([:pid1]), node2: MapSet.new([:pid2]) } @@ -40,41 +41,66 @@ defmodule SubscriptionsCheckerTest do end end - describe "pop_not_alive_pids/2" do + describe "pop_not_alive_pids/4" do test "one subscription per channel" do - tid = :ets.new(:table, [:public, :bag]) + subscribers_pids_table = :ets.new(:table, [:public, :bag]) + subscribers_nodes_table = 
:ets.new(:table, [:public, :set]) - uuid1 = UUID.uuid1() - uuid2 = UUID.uuid1() + uuid1 = uuid1() + uuid2 = uuid1() + uuid3 = uuid1() - test_data = [ + pids_test_data = [ {:pid1, uuid1, :ref, :node1}, {:pid1, uuid2, :ref, :node1}, - {:pid2, "uuid", :ref, :node2} + {:pid2, uuid3, :ref, :node2} ] - :ets.insert(tid, test_data) + :ets.insert(subscribers_pids_table, pids_test_data) + + nodes_test_data = [ + {string_to_binary!(uuid1), :node1}, + {string_to_binary!(uuid2), :node1}, + {string_to_binary!(uuid3), :node2} + ] - not_alive = Enum.sort(Checker.pop_not_alive_pids([:pid1], tid, "id")) - expected = Enum.sort([UUID.string_to_binary!(uuid1), UUID.string_to_binary!(uuid2)]) + :ets.insert(subscribers_nodes_table, nodes_test_data) + + not_alive = Enum.sort(Checker.pop_not_alive_pids([:pid1], subscribers_pids_table, subscribers_nodes_table, "id")) + expected = Enum.sort([string_to_binary!(uuid1), string_to_binary!(uuid2)]) assert not_alive == expected - assert :ets.tab2list(tid) == [{:pid2, "uuid", :ref, :node2}] + assert :ets.tab2list(subscribers_pids_table) == [{:pid2, uuid3, :ref, :node2}] + assert :ets.tab2list(subscribers_nodes_table) == [{string_to_binary!(uuid3), :node2}] end test "two subscriptions per channel" do - tid = :ets.new(:table, [:public, :bag]) + subscribers_pids_table = :ets.new(:table, [:public, :bag]) + subscribers_nodes_table = :ets.new(:table, [:public, :set]) - uuid1 = UUID.uuid1() + uuid1 = uuid1() + uuid2 = uuid1() test_data = [ {:pid1, uuid1, :ref, :node1}, - {:pid2, "uuid", :ref, :node2} + {:pid2, uuid2, :ref, :node2} ] - :ets.insert(tid, test_data) - assert Checker.pop_not_alive_pids([:pid1], tid, "id") == [UUID.string_to_binary!(uuid1)] - assert :ets.tab2list(tid) == [{:pid2, "uuid", :ref, :node2}] + :ets.insert(subscribers_pids_table, test_data) + + nodes_test_data = [ + {string_to_binary!(uuid1), :node1}, + {string_to_binary!(uuid2), :node2} + ] + + :ets.insert(subscribers_nodes_table, nodes_test_data) + + assert 
Checker.pop_not_alive_pids([:pid1], subscribers_pids_table, subscribers_nodes_table, "id") == [ + string_to_binary!(uuid1) + ] + + assert :ets.tab2list(subscribers_pids_table) == [{:pid2, uuid2, :ref, :node2}] + assert :ets.tab2list(subscribers_nodes_table) == [{string_to_binary!(uuid2), :node2}] end end end diff --git a/test/realtime/extensions/cdc_rls/subscriptions_test.exs b/test/realtime/extensions/cdc_rls/subscriptions_test.exs index cb53b72ed..975313861 100644 --- a/test/realtime/extensions/cdc_rls/subscriptions_test.exs +++ b/test/realtime/extensions/cdc_rls/subscriptions_test.exs @@ -1,13 +1,13 @@ -defmodule Realtime.Extensionsubscriptions.CdcRlsSubscriptionsTest do +defmodule Realtime.Extensions.PostgresCdcRls.SubscriptionsTest do use RealtimeWeb.ChannelCase, async: true - doctest Extensions.PostgresCdcRls.Subscriptions + + doctest Extensions.PostgresCdcRls.Subscriptions, import: true alias Extensions.PostgresCdcRls.Subscriptions alias Realtime.Database - alias Realtime.Tenants setup do - tenant = Tenants.get_tenant_by_external_id("dev_tenant") + tenant = Containers.checkout_tenant(run_migrations: true) {:ok, conn} = tenant @@ -16,106 +16,194 @@ defmodule Realtime.Extensionsubscriptions.CdcRlsSubscriptionsTest do |> Keyword.new() |> Postgrex.start_link() + Subscriptions.delete_all(conn) + assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + %{conn: conn} end - test "create", %{conn: conn} do - Subscriptions.delete_all(conn) + describe "create/5" do + test "create all tables & all events", %{conn: conn} do + {:ok, subscription_params} = Subscriptions.parse_subscription_params(%{"event" => "*", "schema" => "public"}) + params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] - assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + assert {:ok, [%Postgrex.Result{}]} = + 
Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) - params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), params: %{"event" => "*", "schema" => "public"}}] + %Postgrex.Result{rows: [[1]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end - assert {:ok, [%Postgrex.Result{}]} = - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) + test "create specific table all events", %{conn: conn} do + {:ok, subscription_params} = Subscriptions.parse_subscription_params(%{"schema" => "public", "table" => "test"}) - Process.sleep(500) + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] - params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), params: %{"schema" => "public", "table" => "test"}}] + assert {:ok, [%Postgrex.Result{}]} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) - assert {:ok, [%Postgrex.Result{}]} = - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) + %Postgrex.Result{rows: [[1]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end - Process.sleep(500) + test "publication does not exist", %{conn: conn} do + {:ok, subscription_params} = Subscriptions.parse_subscription_params(%{"schema" => "public", "table" => "test"}) - params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), params: %{}}] + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] - assert {:error, - "No subscription params provided. 
Please provide at least a `schema` or `table` to subscribe to: %{}"} = - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) + Postgrex.query!(conn, "drop publication if exists supabase_realtime_test", []) - Process.sleep(500) + assert {:error, + {:subscription_insert_failed, + "Unable to subscribe to changes with given parameters. Please check Realtime is enabled for the given connect parameters: [schema: public, table: test, filters: []]"}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) - params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), params: %{"user_token" => "potato"}}] + %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end - assert {:error, - "No subscription params provided. Please provide at least a `schema` or `table` to subscribe to: "} = - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) + test "table does not exist", %{conn: conn} do + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{"schema" => "public", "table" => "doesnotexist"}) - Process.sleep(500) + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] - params_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), params: %{"auth_token" => "potato"}}] + assert {:error, + {:subscription_insert_failed, + "Unable to subscribe to changes with given parameters. Please check Realtime is enabled for the given connect parameters: [schema: public, table: doesnotexist, filters: []]"}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) - assert {:error, - "No subscription params provided. 
Please provide at least a `schema` or `table` to subscribe to: "} = - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) + %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end - Process.sleep(500) + test "column does not exist", %{conn: conn} do + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{ + "schema" => "public", + "table" => "test", + "filter" => "subject=eq.hey" + }) - %Postgrex.Result{rows: [[num]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) - assert num != 0 - end + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] - test "delete_all", %{conn: conn} do - create_subscriptions(conn, 10) - assert {:ok, %Postgrex.Result{}} = Subscriptions.delete_all(conn) - assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) - end + assert {:error, + {:subscription_insert_failed, + "Unable to subscribe to changes with given parameters. An exception happened so please check your connect parameters: [schema: public, table: test, filters: [{\"subject\", \"eq\", \"hey\"}]]. 
Exception: ERROR P0001 (raise_exception) invalid column for filter subject"}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) - test "delete", %{conn: conn} do - Subscriptions.delete_all(conn) - id = UUID.uuid1() - bin_id = UUID.string_to_binary!(id) + %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end - params_list = [%{id: id, claims: %{"role" => "anon"}, params: %{"event" => "*"}}] - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) - Process.sleep(500) + test "column type is wrong", %{conn: conn} do + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{ + "schema" => "public", + "table" => "test", + "filter" => "id=eq.hey" + }) - assert {:ok, %Postgrex.Result{}} = Subscriptions.delete(conn, bin_id) - assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] + + assert {:error, + {:subscription_insert_failed, + "Unable to subscribe to changes with given parameters. An exception happened so please check your connect parameters: [schema: public, table: test, filters: [{\"id\", \"eq\", \"hey\"}]]. 
Exception: ERROR 22P02 (invalid_text_representation) invalid input syntax for type integer: \"hey\""}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) + + %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end + + test "connection error" do + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{"schema" => "public", "table" => "test"}) + + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] + conn = spawn(fn -> :ok end) + + assert {:error, {:exit, _}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) + end + + test "timeout", %{conn: conn} do + {:ok, subscription_params} = Subscriptions.parse_subscription_params(%{"schema" => "public", "table" => "test"}) + + Task.start(fn -> Postgrex.query!(conn, "SELECT pg_sleep(20)", []) end) + + subscription_list = [%{claims: %{"role" => "anon"}, id: UUID.uuid1(), subscription_params: subscription_params}] + + assert {:error, %DBConnection.ConnectionError{reason: :queue_timeout}} = + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) + end end - test "delete_multi", %{conn: conn} do - Subscriptions.delete_all(conn) - id1 = UUID.uuid1() - id2 = UUID.uuid1() + describe "delete_all/1" do + test "delete_all", %{conn: conn} do + create_subscriptions(conn, 10) + assert {:ok, %Postgrex.Result{}} = Subscriptions.delete_all(conn) + assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end + end - bin_id2 = UUID.string_to_binary!(id2) - bin_id1 = UUID.string_to_binary!(id1) + describe "delete/2" do + test "delete", %{conn: conn} do + id = UUID.uuid1() + bin_id = UUID.string_to_binary!(id) - params_list = [ - %{claims: %{"role" => "anon"}, id: id1, params: %{"event" => "*"}}, - %{claims: %{"role" => "anon"}, id: id2, params: 
%{"event" => "*"}} - ] + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{ + "schema" => "public", + "table" => "test", + "filter" => "id=eq.hey" + }) - Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) - Process.sleep(500) + subscription_list = [%{claims: %{"role" => "anon"}, id: id, subscription_params: subscription_params}] + Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) - assert {:ok, %Postgrex.Result{}} = Subscriptions.delete_multi(conn, [bin_id1, bin_id2]) - assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + assert {:ok, %Postgrex.Result{}} = Subscriptions.delete(conn, bin_id) + assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end end - test "maybe_delete_all", %{conn: conn} do - Subscriptions.delete_all(conn) - create_subscriptions(conn, 10) + describe "delete_multi/2" do + test "delete_multi", %{conn: conn} do + Subscriptions.delete_all(conn) + id1 = UUID.uuid1() + id2 = UUID.uuid1() + + bin_id2 = UUID.string_to_binary!(id2) + bin_id1 = UUID.string_to_binary!(id1) + + {:ok, subscription_params} = + Subscriptions.parse_subscription_params(%{ + "schema" => "public", + "table" => "test", + "filter" => "id=eq.123" + }) + + subscription_list = [ + %{claims: %{"role" => "anon"}, id: id1, subscription_params: subscription_params}, + %{claims: %{"role" => "anon"}, id: id2, subscription_params: subscription_params} + ] + + assert {:ok, _} = Subscriptions.create(conn, "supabase_realtime_test", subscription_list, self(), self()) + + assert %Postgrex.Result{rows: [[2]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + assert {:ok, %Postgrex.Result{}} = Subscriptions.delete_multi(conn, [bin_id1, bin_id2]) + assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end + 
end - assert {:ok, %Postgrex.Result{}} = Subscriptions.maybe_delete_all(conn) - assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + describe "maybe_delete_all/1" do + test "maybe_delete_all", %{conn: conn} do + Subscriptions.delete_all(conn) + create_subscriptions(conn, 10) + + assert {:ok, %Postgrex.Result{}} = Subscriptions.maybe_delete_all(conn) + assert %Postgrex.Result{rows: [[0]]} = Postgrex.query!(conn, "select count(*) from realtime.subscription", []) + end end - test "fetch_publication_tables", %{conn: conn} do - tables = Subscriptions.fetch_publication_tables(conn, "supabase_realtime_test") - assert tables[{"*"}] != nil + describe "fetch_publication_tables/2" do + test "fetch_publication_tables", %{conn: conn} do + tables = Subscriptions.fetch_publication_tables(conn, "supabase_realtime_test") + assert tables[{"*"}] != nil + end end defp create_subscriptions(conn, num) do @@ -131,13 +219,12 @@ defmodule Realtime.Extensionsubscriptions.CdcRlsSubscriptionsTest do "role" => "anon" }, id: UUID.uuid1(), - params: %{"event" => "*", "schema" => "public"} + subscription_params: {"public", "*", []} } | acc ] end) Subscriptions.create(conn, "supabase_realtime_test", params_list, self(), self()) - Process.sleep(500) end end diff --git a/test/realtime/gen_rpc_pub_sub/worker_test.exs b/test/realtime/gen_rpc_pub_sub/worker_test.exs new file mode 100644 index 000000000..880fa5132 --- /dev/null +++ b/test/realtime/gen_rpc_pub_sub/worker_test.exs @@ -0,0 +1,71 @@ +defmodule Realtime.GenRpcPubSub.WorkerTest do + use ExUnit.Case, async: true + alias Realtime.GenRpcPubSub.Worker + alias Realtime.GenRpc + alias Realtime.Nodes + + use Mimic + + @topic "test_topic" + + setup do + worker = start_link_supervised!({Worker, {Realtime.PubSub, __MODULE__}}) + %{worker: worker} + end + + describe "forward to local" do + test "local broadcast", %{worker: worker} do + :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, @topic) + 
send(worker, Worker.forward_to_local(@topic, "le message", Phoenix.PubSub)) + + assert_receive "le message" + refute_receive _any + end + end + + describe "forward to region" do + setup %{worker: worker} do + GenRpc + |> stub() + |> allow(self(), worker) + + Nodes + |> stub() + |> allow(self(), worker) + + :ok + end + + test "local broadcast + forward to other nodes", %{worker: worker} do + parent = self() + expect(Nodes, :region_nodes, fn "us-east-1" -> [node(), :node_us_2, :node_us_3] end) + + expect(GenRpc, :abcast, fn [:node_us_2, :node_us_3], + Realtime.GenRpcPubSub.WorkerTest, + {:ftl, "test_topic", "le message", Phoenix.PubSub}, + [] -> + send(parent, :abcast_called) + :ok + end) + + :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, @topic) + send(worker, Worker.forward_to_region(@topic, "le message", Phoenix.PubSub)) + + assert_receive "le message" + assert_receive :abcast_called + refute_receive _any + end + + test "local broadcast and no other nodes", %{worker: worker} do + expect(Nodes, :region_nodes, fn "us-east-1" -> [node()] end) + + reject(GenRpc, :abcast, 4) + + :ok = Phoenix.PubSub.subscribe(Realtime.PubSub, @topic) + send(worker, Worker.forward_to_region(@topic, "le message", Phoenix.PubSub)) + + assert_receive "le message" + refute_receive _any + end + end +end diff --git a/test/realtime/gen_rpc_pub_sub_test.exs b/test/realtime/gen_rpc_pub_sub_test.exs new file mode 100644 index 000000000..4c5ded562 --- /dev/null +++ b/test/realtime/gen_rpc_pub_sub_test.exs @@ -0,0 +1,126 @@ +Application.put_env(:phoenix_pubsub, :test_adapter, {Realtime.GenRpcPubSub, []}) +Code.require_file("../../deps/phoenix_pubsub/test/shared/pubsub_test.exs", __DIR__) + +defmodule Realtime.GenRpcPubSubTest do + # Application env being changed + use ExUnit.Case, async: false + + test "it sets off_heap message_queue_data flag on the workers" do + assert Realtime.PubSubElixir.Realtime.PubSub.Adapter_1 + |> Process.whereis() + |> Process.info(:message_queue_data) == 
{:message_queue_data, :off_heap} + end + + test "it sets fullsweep_after flag on the workers" do + assert Realtime.PubSubElixir.Realtime.PubSub.Adapter_1 + |> Process.whereis() + |> Process.info(:fullsweep_after) == {:fullsweep_after, 20} + end + + @aux_mod (quote do + defmodule Subscriber do + # Relay messages to testing node + def subscribe(subscriber, topic) do + spawn(fn -> + RealtimeWeb.Endpoint.subscribe(topic) + 2 = length(Realtime.Nodes.region_nodes("us-east-1")) + 2 = length(Realtime.Nodes.region_nodes("ap-southeast-2")) + send(subscriber, {:ready, Application.get_env(:realtime, :region)}) + + loop = fn f -> + receive do + msg -> send(subscriber, {:relay, node(), msg}) + end + + f.(f) + end + + loop.(loop) + end) + end + end + end) + + Code.eval_quoted(@aux_mod) + + @topic "gen-rpc-pub-sub-test-topic" + + for regional_broadcasting <- [true, false] do + describe "regional balancing = #{regional_broadcasting}" do + setup do + previous_region = Application.get_env(:realtime, :region) + Application.put_env(:realtime, :region, "us-east-1") + on_exit(fn -> Application.put_env(:realtime, :region, previous_region) end) + + previous_regional_broadcast = Application.get_env(:realtime, :regional_broadcasting) + Application.put_env(:realtime, :regional_broadcasting, unquote(regional_broadcasting)) + on_exit(fn -> Application.put_env(:realtime, :regional_broadcasting, previous_regional_broadcast) end) + + :ok + end + + @describetag regional_broadcasting: regional_broadcasting + + test "all messages are received" do + # start 1 node in us-east-1 to test my region broadcasting + # start 2 nodes in ap-southeast-2 to test other region broadcasting + + us_node = :us_node + ap2_nodeX = :ap2_nodeX + ap2_nodeY = :ap2_nodeY + + # Avoid port collision + client_config_per_node = %{ + :"main@127.0.0.1" => 5969, + :"#{us_node}@127.0.0.1" => 16970, + :"#{ap2_nodeX}@127.0.0.1" => 16971, + :"#{ap2_nodeY}@127.0.0.1" => 16972 + } + + extra_config = [{:gen_rpc, :client_config_per_node, 
{:internal, client_config_per_node}}] + + on_exit(fn -> Application.put_env(:gen_rpc, :client_config_per_node, {:internal, %{}}) end) + Application.put_env(:gen_rpc, :client_config_per_node, {:internal, client_config_per_node}) + + us_extra_config = + [{:realtime, :region, "us-east-1"}, {:gen_rpc, :tcp_server_port, 16970}] ++ extra_config + + {:ok, _} = Clustered.start(@aux_mod, name: us_node, extra_config: us_extra_config, phoenix_port: 4014) + + ap2_nodeX_extra_config = + [{:realtime, :region, "ap-southeast-2"}, {:gen_rpc, :tcp_server_port, 16971}] ++ extra_config + + {:ok, _} = Clustered.start(@aux_mod, name: ap2_nodeX, extra_config: ap2_nodeX_extra_config, phoenix_port: 4015) + + ap2_nodeY_extra_config = + [{:realtime, :region, "ap-southeast-2"}, {:gen_rpc, :tcp_server_port, 16972}] ++ extra_config + + {:ok, _} = Clustered.start(@aux_mod, name: ap2_nodeY, extra_config: ap2_nodeY_extra_config, phoenix_port: 4016) + + # Ensuring that syn had enough time to propagate to all nodes the group information + Process.sleep(3000) + + RealtimeWeb.Endpoint.subscribe(@topic) + :erpc.multicall(Node.list(), Subscriber, :subscribe, [self(), @topic]) + + assert length(Realtime.Nodes.region_nodes("us-east-1")) == 2 + assert length(Realtime.Nodes.region_nodes("ap-southeast-2")) == 2 + + assert_receive {:ready, "us-east-1"} + assert_receive {:ready, "ap-southeast-2"} + assert_receive {:ready, "ap-southeast-2"} + + message = %Phoenix.Socket.Broadcast{topic: @topic, event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} + Phoenix.PubSub.broadcast(Realtime.PubSub, @topic, message) + + assert_receive ^message + + # Remote nodes received the broadcast + assert_receive {:relay, :"us_node@127.0.0.1", ^message}, 5000 + assert_receive {:relay, :"ap2_nodeX@127.0.0.1", ^message}, 1000 + assert_receive {:relay, :"ap2_nodeY@127.0.0.1", ^message}, 1000 + refute_receive _any + end + end + end +end diff --git a/test/realtime/gen_rpc_test.exs b/test/realtime/gen_rpc_test.exs index 
dd837aaf8..fbbd155f4 100644 --- a/test/realtime/gen_rpc_test.exs +++ b/test/realtime/gen_rpc_test.exs @@ -28,7 +28,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: true, - tenant: "123", mechanism: :gen_rpc }} end @@ -43,7 +42,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: false, - tenant: "123", mechanism: :gen_rpc }} end @@ -57,7 +55,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: true, - tenant: "123", mechanism: :gen_rpc }} end @@ -72,7 +69,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: "123", mechanism: :gen_rpc }} end @@ -94,7 +90,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: false, - tenant: 123, mechanism: :gen_rpc }} end @@ -116,7 +111,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: 123, mechanism: :gen_rpc }} end @@ -131,7 +125,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: false, - tenant: "123", mechanism: :gen_rpc }} end @@ -146,7 +139,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: "123", mechanism: :gen_rpc }} end @@ -168,10 +160,101 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: 123, mechanism: :gen_rpc }} end + + test "bad node" do + node = :"unknown@1.1.1.1" + + log = + capture_log(fn -> + assert GenRpc.call(node, Map, :fetch, [%{a: 1}, :a], tenant_id: 123) == {:error, :rpc_error, :badnode} + end) + + assert log =~ + ~r/project=123 external_id=123 \[error\] ErrorOnRpcCall: %{+error: :badnode, mod: Map, func: :fetch, target: :"#{node}"/ + end + end + + describe "abcast/4" do + test "abcast to registered process", %{node: node} do + 
name = + System.unique_integer() + |> to_string() + |> String.to_atom() + + :erlang.register(name, self()) + + # Use erpc to make the other node abcast to this one + :erpc.call(node, GenRpc, :abcast, [[node()], name, "a message", []]) + + assert_receive "a message" + refute_receive _any + end + + @tag extra_config: [{:gen_rpc, :tcp_server_port, 9999}] + test "tcp error" do + Logger.put_process_level(self(), :debug) + + log = + capture_log(fn -> + assert GenRpc.abcast(Node.list(), :some_process_name, "a message", []) == :ok + # We have to wait for gen_rpc logs to show up + Process.sleep(100) + end) + + assert log =~ "[error] event=connect_to_remote_server" + + refute_receive _any + end + end + + describe "cast/5" do + test "apply on a local node" do + parent = self() + + assert GenRpc.cast(node(), Kernel, :send, [parent, :sent]) == :ok + + assert_receive :sent + refute_receive _any + end + + test "apply on a remote node", %{node: node} do + parent = self() + + assert GenRpc.cast(node, Kernel, :send, [parent, :sent]) == :ok + + assert_receive :sent + refute_receive _any + end + + test "bad node does nothing" do + node = :"unknown@1.1.1.1" + + parent = self() + + assert GenRpc.cast(node, Kernel, :send, [parent, :sent]) == :ok + + refute_receive _any + end + + @tag extra_config: [{:gen_rpc, :tcp_server_port, 9999}] + test "tcp error", %{node: node} do + parent = self() + Logger.put_process_level(self(), :debug) + + log = + capture_log(fn -> + assert GenRpc.cast(node, Kernel, :send, [parent, :sent]) == :ok + # We have to wait for gen_rpc logs to show up + Process.sleep(100) + end) + + assert log =~ "[error] event=connect_to_remote_server" + + refute_receive _any + end end describe "multicast/4" do @@ -223,7 +306,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: true, - tenant: "123", mechanism: :gen_rpc }} @@ -232,7 +314,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: 
true, - tenant: "123", mechanism: :gen_rpc }} end @@ -259,7 +340,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: 123, mechanism: :gen_rpc }} @@ -268,7 +348,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: false, - tenant: 123, mechanism: :gen_rpc }} end @@ -293,7 +372,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^node, success: false, - tenant: 123, mechanism: :gen_rpc }} @@ -302,7 +380,6 @@ defmodule Realtime.GenRpcTest do origin_node: ^current_node, target_node: ^current_node, success: true, - tenant: 123, mechanism: :gen_rpc }} end diff --git a/test/realtime/messages_test.exs b/test/realtime/messages_test.exs index 3bef9a5e0..5590adca9 100644 --- a/test/realtime/messages_test.exs +++ b/test/realtime/messages_test.exs @@ -1,10 +1,11 @@ defmodule Realtime.MessagesTest do - use Realtime.DataCase, async: true + # usage of Clustered + use Realtime.DataCase, async: false alias Realtime.Api.Message alias Realtime.Database alias Realtime.Messages - alias Realtime.Repo + alias Realtime.Tenants.Repo setup do tenant = Containers.checkout_tenant(run_migrations: true) @@ -13,35 +14,248 @@ defmodule Realtime.MessagesTest do date_start = Date.utc_today() |> Date.add(-10) date_end = Date.utc_today() create_messages_partitions(conn, date_start, date_end) + + on_exit(fn -> :telemetry.detach(__MODULE__) end) + + :telemetry.attach( + __MODULE__, + [:realtime, :tenants, :replay], + &__MODULE__.handle_telemetry/4, + pid: self() + ) + %{conn: conn, tenant: tenant, date_start: date_start, date_end: date_end} end - test "delete_old_messages/1 deletes messages older than 72 hours", %{ - conn: conn, - tenant: tenant, - date_start: date_start, - date_end: date_end - } do - utc_now = NaiveDateTime.utc_now() - limit = NaiveDateTime.add(utc_now, -72, :hour) - - messages = - for date <- Date.range(date_start, date_end) do - inserted_at = date 
|> NaiveDateTime.new!(Time.new!(0, 0, 0)) - message_fixture(tenant, %{inserted_at: inserted_at}) + describe "replay/5" do + test "invalid replay params", %{tenant: tenant} do + assert Messages.replay(self(), tenant.external_id, "a topic", "not a number", 123) == + {:error, :invalid_replay_params} + + assert Messages.replay(self(), tenant.external_id, "a topic", 123, "not a number") == + {:error, :invalid_replay_params} + + assert Messages.replay(self(), tenant.external_id, "a topic", 253_402_300_800_000, 10) == + {:error, :invalid_replay_params} + end + + test "empty replay", %{conn: conn} do + assert Messages.replay(conn, "tenant_id", "test", 0, 10) == {:ok, [], MapSet.new()} + end + + test "replay respects limit", %{conn: conn, tenant: tenant} do + external_id = tenant.external_id + + m1 = + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute), + "event" => "new", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "new"} + }) + + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-2, :minute), + "event" => "old", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "old"} + }) + + assert Messages.replay(conn, external_id, "test", 0, 1) == {:ok, [m1], MapSet.new([m1.id])} + + assert_receive { + :telemetry, + [:realtime, :tenants, :replay], + %{latency: _}, + %{tenant: ^external_id} + } + end + + test "replay private topic only", %{conn: conn, tenant: tenant} do + privatem = + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute), + "event" => "new", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "new"} + }) + + message_fixture(tenant, %{ + "private" => false, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-2, :minute), + "event" => "old", + "extension" => 
"broadcast", + "topic" => "test", + "payload" => %{"value" => "old"} + }) + + assert Messages.replay(conn, tenant.external_id, "test", 0, 10) == {:ok, [privatem], MapSet.new([privatem.id])} + end + + test "replay extension=broadcast", %{conn: conn, tenant: tenant} do + privatem = + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute), + "event" => "new", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "new"} + }) + + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-2, :minute), + "event" => "old", + "extension" => "presence", + "topic" => "test", + "payload" => %{"value" => "old"} + }) + + assert Messages.replay(conn, tenant.external_id, "test", 0, 10) == {:ok, [privatem], MapSet.new([privatem.id])} + end + + test "replay respects since", %{conn: conn, tenant: tenant} do + m1 = + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-2, :minute), + "event" => "first", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "first"} + }) + + m2 = + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute), + "event" => "second", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "second"} + }) + + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-10, :minute), + "event" => "old", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "old"} + }) + + since = DateTime.utc_now() |> DateTime.add(-3, :minute) |> DateTime.to_unix(:millisecond) + + assert Messages.replay(conn, tenant.external_id, "test", since, 10) == {:ok, [m1, m2], MapSet.new([m1.id, m2.id])} + end + + test "replay respects hard max limit of 25", %{conn: conn, tenant: tenant} do + 
for _i <- 1..30 do + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now(), + "event" => "event", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "message"} + }) end - assert length(messages) == 11 + assert {:ok, messages, set} = Messages.replay(conn, tenant.external_id, "test", 0, 30) + assert length(messages) == 25 + assert MapSet.size(set) == 25 + end + + test "replay respects hard min limit of 1", %{conn: conn, tenant: tenant} do + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now(), + "event" => "event", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "message"} + }) + + assert {:ok, messages, set} = Messages.replay(conn, tenant.external_id, "test", 0, 0) + assert length(messages) == 1 + assert MapSet.size(set) == 1 + end + + test "distributed replay", %{conn: conn, tenant: tenant} do + m = + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now(), + "event" => "event", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "message"} + }) + + {:ok, node} = Clustered.start() + + # Call remote node passing the database connection that is local to this node + assert :erpc.call(node, Messages, :replay, [conn, tenant.external_id, "test", 0, 30]) == + {:ok, [m], MapSet.new([m.id])} + end - to_keep = - Enum.reject( - messages, - &(NaiveDateTime.compare(limit, &1.inserted_at) == :gt) - ) + test "distributed replay error", %{tenant: tenant} do + message_fixture(tenant, %{ + "inserted_at" => NaiveDateTime.utc_now(), + "event" => "event", + "extension" => "broadcast", + "topic" => "test", + "private" => true, + "payload" => %{"value" => "message"} + }) - assert :ok = Messages.delete_old_messages(conn) - {:ok, current} = Repo.all(conn, from(m in Message), Message) + {:ok, node} = Clustered.start() - assert Enum.sort(current) == Enum.sort(to_keep) + # Call remote node passing 
the database connection that is local to this node + pid = spawn(fn -> :ok end) + + assert :erpc.call(node, Messages, :replay, [pid, tenant.external_id, "test", 0, 30]) == + {:error, :failed_to_replay_messages} + end end + + describe "delete_old_messages/1" do + test "delete_old_messages/1 deletes messages older than 72 hours", %{ + conn: conn, + tenant: tenant, + date_start: date_start, + date_end: date_end + } do + utc_now = NaiveDateTime.utc_now() + limit = NaiveDateTime.add(utc_now, -72, :hour) + + messages = + for date <- Date.range(date_start, date_end) do + inserted_at = date |> NaiveDateTime.new!(Time.new!(0, 0, 0)) + message_fixture(tenant, %{inserted_at: inserted_at}) + end + + assert length(messages) == 11 + + to_keep = + Enum.reject( + messages, + &(NaiveDateTime.compare(NaiveDateTime.beginning_of_day(limit), &1.inserted_at) == :gt) + ) + + assert :ok = Messages.delete_old_messages(conn) + {:ok, current} = Repo.all(conn, from(m in Message), Message) + + assert Enum.sort(current) == Enum.sort(to_keep) + end + end + + def handle_telemetry(event, measures, metadata, pid: pid), do: send(pid, {:telemetry, event, measures, metadata}) end diff --git a/test/realtime/metrics_cleaner_test.exs b/test/realtime/metrics_cleaner_test.exs index fbe9d8515..a071f72b4 100644 --- a/test/realtime/metrics_cleaner_test.exs +++ b/test/realtime/metrics_cleaner_test.exs @@ -8,11 +8,9 @@ defmodule Realtime.MetricsCleanerTest do setup do interval = Application.get_env(:realtime, :metrics_cleaner_schedule_timer_in_ms) Application.put_env(:realtime, :metrics_cleaner_schedule_timer_in_ms, 100) - tenant = Containers.checkout_tenant(run_migrations: true) + on_exit(fn -> Application.put_env(:realtime, :metrics_cleaner_schedule_timer_in_ms, interval) end) - on_exit(fn -> - Application.put_env(:realtime, :metrics_cleaner_schedule_timer_in_ms, interval) - end) + tenant = Containers.checkout_tenant(run_migrations: true) %{tenant: tenant} end @@ -24,22 +22,30 @@ defmodule 
Realtime.MetricsCleanerTest do # Wait for promex to collect the metrics Process.sleep(6000) - Realtime.Telemetry.execute( + :telemetry.execute( [:realtime, :connections], %{connected: 10, connected_cluster: 10, limit: 100}, %{tenant: external_id} ) - assert Realtime.PromEx.Metrics - |> :ets.select([{{{:_, %{tenant: :"$1"}}, :_}, [], [:"$1"]}]) - |> Enum.any?(&(&1 == external_id)) + :telemetry.execute( + [:realtime, :connections], + %{connected: 20, connected_cluster: 20, limit: 100}, + %{tenant: "disconnected-tenant"} + ) - Connect.shutdown(external_id) + metrics = Realtime.PromEx.get_metrics() |> IO.iodata_to_binary() + + assert String.contains?(metrics, external_id) + assert String.contains?(metrics, "disconnected-tenant") + + # Wait for clenaup to run Process.sleep(200) - refute Realtime.PromEx.Metrics - |> :ets.select([{{{:_, %{tenant: :"$1"}}, :_}, [], [:"$1"]}]) - |> Enum.any?(&(&1 == external_id)) + metrics = Realtime.PromEx.get_metrics() |> IO.iodata_to_binary() + + assert String.contains?(metrics, external_id) + refute String.contains?(metrics, "disconnected-tenant") end end end diff --git a/test/realtime/monitoring/distributed_metrics_test.exs b/test/realtime/monitoring/distributed_metrics_test.exs index 491083973..49fe4af6f 100644 --- a/test/realtime/monitoring/distributed_metrics_test.exs +++ b/test/realtime/monitoring/distributed_metrics_test.exs @@ -15,7 +15,7 @@ defmodule Realtime.DistributedMetricsTest do ^node => %{ pid: _pid, port: _port, - queue_size: {:ok, 0}, + queue_size: {:ok, _}, state: :up, inet_stats: [ recv_oct: _, diff --git a/test/realtime/monitoring/erl_sys_mon_test.exs b/test/realtime/monitoring/erl_sys_mon_test.exs index b1e122d58..e9c7b87b7 100644 --- a/test/realtime/monitoring/erl_sys_mon_test.exs +++ b/test/realtime/monitoring/erl_sys_mon_test.exs @@ -5,16 +5,25 @@ defmodule Realtime.Monitoring.ErlSysMonTest do describe "system monitoring" do test "logs system monitor events" do - start_supervised!({ErlSysMon, config: 
[{:long_message_queue, {1, 10}}]}) + start_supervised!({ErlSysMon, config: [{:long_message_queue, {1, 100}}]}) - assert capture_log(fn -> - Task.async(fn -> - Enum.map(1..1000, &send(self(), &1)) - # Wait for ErlSysMon to notice - Process.sleep(4000) - end) - |> Task.await() - end) =~ "Realtime.ErlSysMon message:" + log = + capture_log(fn -> + Task.async(fn -> + Process.register(self(), TestProcess) + Enum.map(1..1000, &send(self(), &1)) + # Wait for ErlSysMon to notice + Process.sleep(4000) + end) + |> Task.await() + end) + + assert log =~ "Realtime.ErlSysMon message:" + assert log =~ "$initial_call\", {Realtime.Monitoring.ErlSysMonTest" + assert log =~ "ancestors\", [#{inspect(self())}]" + assert log =~ "registered_name: TestProcess" + assert log =~ "message_queue_len: " + assert log =~ "total_heap_size: " end end end diff --git a/test/realtime/monitoring/prom_ex/plugins/distributed_test.exs b/test/realtime/monitoring/prom_ex/plugins/distributed_test.exs index ff4c4f098..731873066 100644 --- a/test/realtime/monitoring/prom_ex/plugins/distributed_test.exs +++ b/test/realtime/monitoring/prom_ex/plugins/distributed_test.exs @@ -23,55 +23,41 @@ defmodule Realtime.PromEx.Plugins.DistributedTest do describe "pooling metrics" do setup do - metrics = - PromEx.get_metrics(MetricsTest) - |> String.split("\n", trim: true) - - %{metrics: metrics} + %{metrics: PromEx.get_metrics(MetricsTest)} end test "send_pending_bytes", %{metrics: metrics, node: node} do - pattern = ~r/dist_send_pending_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) == 0 + assert metric_value(metrics, "dist_send_pending_bytes", origin_node: node(), target_node: node) == 0 end test "send_count", %{metrics: metrics, node: node} do - pattern = ~r/dist_send_count{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "dist_send_count", origin_node: node(), target_node: 
node) + assert is_integer(value) + assert value > 0 end test "send_bytes", %{metrics: metrics, node: node} do - pattern = ~r/dist_send_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "dist_send_bytes", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "recv_count", %{metrics: metrics, node: node} do - pattern = ~r/dist_recv_count{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "dist_recv_count", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "recv_bytes", %{metrics: metrics, node: node} do - pattern = ~r/dist_recv_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "dist_recv_bytes", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "queue_size", %{metrics: metrics, node: node} do - pattern = ~r/dist_queue_size{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert is_integer(metric_value(metrics, pattern)) + assert is_integer(metric_value(metrics, "dist_queue_size", origin_node: node(), target_node: node)) end end - defp metric_value(metrics, pattern) do - metrics - |> Enum.find_value( - "0", - fn item -> - case Regex.run(pattern, item, capture: ["number"]) do - [number] -> number - _ -> false - end - end - ) - |> String.to_integer() - end + defp metric_value(metrics, metric, expected_tags), do: MetricsHelper.search(metrics, metric, expected_tags) end diff --git a/test/realtime/monitoring/prom_ex/plugins/gen_rpc_test.exs b/test/realtime/monitoring/prom_ex/plugins/gen_rpc_test.exs index 25d8fae16..5396aae6b 100644 --- a/test/realtime/monitoring/prom_ex/plugins/gen_rpc_test.exs +++ b/test/realtime/monitoring/prom_ex/plugins/gen_rpc_test.exs @@ -23,55 
+23,42 @@ defmodule Realtime.PromEx.Plugins.GenRpcTest do describe "pooling metrics" do setup do - metrics = - PromEx.get_metrics(MetricsTest) - |> String.split("\n", trim: true) - - %{metrics: metrics} + %{metrics: PromEx.get_metrics(MetricsTest)} end test "send_pending_bytes", %{metrics: metrics, node: node} do - pattern = ~r/gen_rpc_send_pending_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) == 0 + assert metric_value(metrics, "gen_rpc_send_pending_bytes", origin_node: node(), target_node: node) == 0 end test "send_count", %{metrics: metrics, node: node} do - pattern = ~r/gen_rpc_send_count{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "gen_rpc_send_count", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "send_bytes", %{metrics: metrics, node: node} do - pattern = ~r/gen_rpc_send_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "gen_rpc_send_bytes", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "recv_count", %{metrics: metrics, node: node} do - pattern = ~r/gen_rpc_recv_count{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "gen_rpc_recv_count", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "recv_bytes", %{metrics: metrics, node: node} do - pattern = ~r/gen_rpc_recv_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) > 0 + value = metric_value(metrics, "gen_rpc_recv_bytes", origin_node: node(), target_node: node) + assert is_integer(value) + assert value > 0 end test "queue_size", %{metrics: metrics, node: node} do - pattern = 
~r/gen_rpc_queue_size_bytes{origin_node=\"#{node()}\",target_node=\"#{node}\"}\s(?\d+)/ - assert metric_value(metrics, pattern) == 0 + value = metric_value(metrics, "gen_rpc_queue_size_bytes", origin_node: node(), target_node: node) + assert is_integer(value) end end - defp metric_value(metrics, pattern) do - metrics - |> Enum.find_value( - "0", - fn item -> - case Regex.run(pattern, item, capture: ["number"]) do - [number] -> number - _ -> false - end - end - ) - |> String.to_integer() - end + defp metric_value(metrics, metric, expected_tags), do: MetricsHelper.search(metrics, metric, expected_tags) end diff --git a/test/realtime/monitoring/prom_ex/plugins/phoenix_test.exs b/test/realtime/monitoring/prom_ex/plugins/phoenix_test.exs index a73e6e2f5..fefde8dc3 100644 --- a/test/realtime/monitoring/prom_ex/plugins/phoenix_test.exs +++ b/test/realtime/monitoring/prom_ex/plugins/phoenix_test.exs @@ -1,6 +1,7 @@ defmodule Realtime.PromEx.Plugins.PhoenixTest do use Realtime.DataCase, async: false alias Realtime.PromEx.Plugins + alias Realtime.Integration.WebsocketClient defmodule MetricsTest do use PromEx, otp_app: :realtime_test_phoenix @@ -10,34 +11,79 @@ defmodule Realtime.PromEx.Plugins.PhoenixTest do end end + setup_all do + start_supervised!(MetricsTest) + :ok + end + + setup do + %{tenant: Containers.checkout_tenant(run_migrations: true)} + end + describe "pooling metrics" do - setup do - start_supervised!(MetricsTest) - :ok + test "number of connections", %{tenant: tenant} do + {:ok, token} = token_valid(tenant, "anon", %{}) + + {:ok, _} = + WebsocketClient.connect( + self(), + uri(tenant, Phoenix.Socket.V1.JSONSerializer, 4002), + Phoenix.Socket.V1.JSONSerializer, + [{"x-api-key", token}] + ) + + {:ok, _} = + WebsocketClient.connect( + self(), + uri(tenant, Phoenix.Socket.V1.JSONSerializer, 4002), + Phoenix.Socket.V1.JSONSerializer, + [{"x-api-key", token}] + ) + + Process.sleep(200) + assert metric_value("phoenix_connections_total") >= 2 end + end + + describe 
"event metrics" do + test "socket connected", %{tenant: tenant} do + {:ok, token} = token_valid(tenant, "anon", %{}) - test "number of connections" do - # Trigger a connection by making a request to the endpoint - url = RealtimeWeb.Endpoint.url() <> "/healthcheck" - Req.get!(url) + {:ok, _} = + WebsocketClient.connect( + self(), + uri(tenant, Phoenix.Socket.V1.JSONSerializer, 4002), + Phoenix.Socket.V1.JSONSerializer, + [{"x-api-key", token}] + ) + + {:ok, _} = + WebsocketClient.connect( + self(), + uri(tenant, RealtimeWeb.Socket.V2Serializer, 4002), + RealtimeWeb.Socket.V2Serializer, + [{"x-api-key", token}] + ) Process.sleep(200) - assert metric_value() > 0 + + assert metric_value("phoenix_socket_connected_duration_milliseconds_count", + endpoint: "RealtimeWeb.Endpoint", + result: "ok", + serializer: "Elixir.Phoenix.Socket.V1.JSONSerializer", + transport: "websocket" + ) >= 1 + + assert metric_value("phoenix_socket_connected_duration_milliseconds_count", + endpoint: "RealtimeWeb.Endpoint", + result: "ok", + serializer: "Elixir.RealtimeWeb.Socket.V2Serializer", + transport: "websocket" + ) >= 1 end end - defp metric_value() do - PromEx.get_metrics(MetricsTest) - |> String.split("\n", trim: true) - |> Enum.find_value( - "0", - fn item -> - case Regex.run(~r/phoenix_connections_total\s(?\d+)/, item, capture: ["number"]) do - [number] -> number - _ -> false - end - end - ) - |> String.to_integer() + defp metric_value(metric, expected_tags \\ nil) do + MetricsHelper.search(PromEx.get_metrics(MetricsTest), metric, expected_tags) end end diff --git a/test/realtime/monitoring/prom_ex/plugins/tenant_test.exs b/test/realtime/monitoring/prom_ex/plugins/tenant_test.exs index 164c8d2eb..84ca9b1fb 100644 --- a/test/realtime/monitoring/prom_ex/plugins/tenant_test.exs +++ b/test/realtime/monitoring/prom_ex/plugins/tenant_test.exs @@ -1,12 +1,14 @@ defmodule Realtime.PromEx.Plugins.TenantTest do - alias Realtime.Tenants.Authorization.Policies use Realtime.DataCase, async: false 
alias Realtime.PromEx.Plugins.Tenant alias Realtime.Rpc - alias Realtime.UsersCounter - alias Realtime.Tenants.Authorization.Policies alias Realtime.Tenants.Authorization + alias Realtime.Tenants.Authorization.Policies + alias Realtime.Tenants.Authorization.Policies + alias Realtime.UsersCounter + alias Realtime.RateCounter + alias Realtime.GenCounter defmodule MetricsTest do use PromEx, otp_app: :realtime_test_phoenix @@ -15,6 +17,11 @@ defmodule Realtime.PromEx.Plugins.TenantTest do def plugins, do: [{Tenant, poll_rate: 50}] end + setup_all do + start_supervised!(MetricsTest) + :ok + end + def handle_telemetry(event, metadata, content, pid: pid), do: send(pid, {event, metadata, content}) @aux_mod (quote do @@ -24,45 +31,52 @@ defmodule Realtime.PromEx.Plugins.TenantTest do end def fake_db_event(external_id) do - external_id - |> Realtime.Tenants.db_events_per_second_rate() - |> Realtime.RateCounter.new() + rate = Realtime.Tenants.db_events_per_second_rate(external_id, 100) - external_id - |> Realtime.Tenants.db_events_per_second_key() - |> Realtime.GenCounter.add() + rate + |> tap(&RateCounter.new(&1)) + |> tap(&GenCounter.add(&1.id)) + |> RateCounterHelper.tick!() end def fake_event(external_id) do - external_id - |> Realtime.Tenants.events_per_second_rate(123) - |> Realtime.RateCounter.new() + rate = Realtime.Tenants.events_per_second_rate(external_id, 123) - external_id - |> Realtime.Tenants.events_per_second_key() - |> Realtime.GenCounter.add() + rate + |> tap(&RateCounter.new(&1)) + |> tap(&GenCounter.add(&1.id)) + |> RateCounterHelper.tick!() end def fake_presence_event(external_id) do - external_id - |> Realtime.Tenants.presence_events_per_second_rate(123) - |> Realtime.RateCounter.new() + rate = Realtime.Tenants.presence_events_per_second_rate(external_id, 123) - external_id - |> Realtime.Tenants.presence_events_per_second_key() - |> Realtime.GenCounter.add() + rate + |> tap(&RateCounter.new(&1)) + |> tap(&GenCounter.add(&1.id)) + |> 
RateCounterHelper.tick!() end def fake_broadcast_from_database(external_id) do Realtime.Telemetry.execute( [:realtime, :tenants, :broadcast_from_database], %{ - latency_committed_at: 10, - latency_inserted_at: 1 + # millisecond + latency_committed_at: 9, + # microsecond + latency_inserted_at: 9000 }, %{tenant: external_id} ) end + + def fake_input_bytes(external_id) do + Realtime.Telemetry.execute([:realtime, :channel, :input_bytes], %{size: 10}, %{tenant: external_id}) + end + + def fake_output_bytes(external_id) do + Realtime.Telemetry.execute([:realtime, :channel, :output_bytes], %{size: 10}, %{tenant: external_id}) + end end end) @@ -75,6 +89,7 @@ defmodule Realtime.PromEx.Plugins.TenantTest do on_exit(fn -> :telemetry.detach(__MODULE__) end) + {:ok, _} = Realtime.Tenants.Connect.lookup_or_start_connection(tenant.external_id) {:ok, node} = Clustered.start(@aux_mod) %{tenant: tenant, node: node} end @@ -85,16 +100,19 @@ defmodule Realtime.PromEx.Plugins.TenantTest do } do UsersCounter.add(self(), external_id) # Add bad tenant id - UsersCounter.add(self(), random_string()) + bad_tenant_id = random_string() + UsersCounter.add(self(), bad_tenant_id) _ = Rpc.call(node, FakeUserCounter, :fake_add, [external_id]) + Process.sleep(500) Tenant.execute_tenant_metrics() assert_receive {[:realtime, :connections], %{connected: 1, limit: 200, connected_cluster: 2}, %{tenant: ^external_id}} - refute_receive :_ + refute_receive {[:realtime, :connections], %{connected: 1, limit: 200, connected_cluster: 2}, + %{tenant: ^bad_tenant_id}} end end @@ -113,47 +131,59 @@ defmodule Realtime.PromEx.Plugins.TenantTest do role: "anon" }) - start_supervised!(MetricsTest) - %{authorization_context: authorization_context, db_conn: db_conn, tenant: tenant} end test "event exists after counter added", %{tenant: %{external_id: external_id}} do - pattern = - ~r/realtime_channel_events{tenant="#{external_id}"}\s(?\d+)/ + metric_value = metric_value("realtime_channel_events", tenant: external_id) 
|| 0 + FakeUserCounter.fake_event(external_id) + + Process.sleep(100) + assert metric_value("realtime_channel_events", tenant: external_id) == metric_value + 1 + end + + test "global event exists after counter added", %{tenant: %{external_id: external_id}} do + metric_value = metric_value("realtime_channel_global_events") || 0 - metric_value = metric_value(pattern) FakeUserCounter.fake_event(external_id) - Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + Process.sleep(100) + assert metric_value("realtime_channel_global_events") == metric_value + 1 end test "db_event exists after counter added", %{tenant: %{external_id: external_id}} do - pattern = - ~r/realtime_channel_db_events{tenant="#{external_id}"}\s(?\d+)/ + metric_value = metric_value("realtime_channel_db_events", tenant: external_id) || 0 + FakeUserCounter.fake_db_event(external_id) + Process.sleep(100) + assert metric_value("realtime_channel_db_events", tenant: external_id) == metric_value + 1 + end + + test "global db_event exists after counter added", %{tenant: %{external_id: external_id}} do + metric_value = metric_value("realtime_channel_global_db_events") || 0 - metric_value = metric_value(pattern) FakeUserCounter.fake_db_event(external_id) - Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + Process.sleep(100) + assert metric_value("realtime_channel_global_db_events") == metric_value + 1 end test "presence_event exists after counter added", %{tenant: %{external_id: external_id}} do - pattern = - ~r/realtime_channel_presence_events{tenant="#{external_id}"}\s(?\d+)/ + metric_value = metric_value("realtime_channel_presence_events", tenant: external_id) || 0 - metric_value = metric_value(pattern) FakeUserCounter.fake_presence_event(external_id) - Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + Process.sleep(100) + assert metric_value("realtime_channel_presence_events", tenant: external_id) == metric_value + 1 end - test "metric 
read_authorization_check exists after check", context do - pattern = - ~r/realtime_tenants_read_authorization_check_count{tenant="#{context.tenant.external_id}"}\s(?\d+)/ + test "global presence_event exists after counter added", %{tenant: %{external_id: external_id}} do + metric_value = metric_value("realtime_channel_global_presence_events") || 0 + FakeUserCounter.fake_presence_event(external_id) + Process.sleep(100) + assert metric_value("realtime_channel_global_presence_events") == metric_value + 1 + end - metric_value = metric_value(pattern) + test "metric read_authorization_check exists after check", context do + metric = "realtime_tenants_read_authorization_check_count" + metric_value = metric_value(metric, tenant: context.tenant.external_id) || 0 {:ok, _} = Authorization.get_read_authorizations( @@ -164,19 +194,17 @@ defmodule Realtime.PromEx.Plugins.TenantTest do Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 - - bucket_pattern = - ~r/realtime_tenants_read_authorization_check_bucket{tenant="#{context.tenant.external_id}",le="250"}\s(?\d+)/ + assert metric_value(metric, tenant: context.tenant.external_id) == metric_value + 1 - assert metric_value(bucket_pattern) > 0 + assert metric_value("realtime_tenants_read_authorization_check_bucket", + tenant: context.tenant.external_id, + le: "250.0" + ) > 0 end test "metric write_authorization_check exists after check", context do - pattern = - ~r/realtime_tenants_write_authorization_check_count{tenant="#{context.tenant.external_id}"}\s(?\d+)/ - - metric_value = metric_value(pattern) + metric = "realtime_tenants_write_authorization_check_count" + metric_value = metric_value(metric, tenant: context.tenant.external_id) || 0 {:ok, _} = Authorization.get_write_authorizations( @@ -188,96 +216,110 @@ defmodule Realtime.PromEx.Plugins.TenantTest do # Wait enough time for the poll rate to be triggered at least once Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + assert 
metric_value(metric, tenant: context.tenant.external_id) == metric_value + 1 + + assert metric_value("realtime_tenants_write_authorization_check_bucket", + tenant: context.tenant.external_id, + le: "250.0" + ) > 0 + end + + test "metric replay exists after check", context do + external_id = context.tenant.external_id + metric = "realtime_tenants_replay_count" + metric_value = metric_value(metric, tenant: external_id) || 0 + + assert {:ok, _, _} = Realtime.Messages.replay(context.db_conn, external_id, "test", 0, 1) - bucket_pattern = - ~r/realtime_tenants_write_authorization_check_bucket{tenant="#{context.tenant.external_id}",le="250"}\s(?\d+)/ + # Wait enough time for the poll rate to be triggered at least once + Process.sleep(200) - assert metric_value(bucket_pattern) > 0 + assert metric_value(metric, tenant: external_id) == metric_value + 1 + + assert metric_value("realtime_tenants_replay_bucket", tenant: external_id, le: "250.0") > 0 end test "metric realtime_tenants_broadcast_from_database_latency_committed_at exists after check", context do - pattern = - ~r/realtime_tenants_broadcast_from_database_latency_committed_at_count{tenant="#{context.tenant.external_id}"}\s(?\d+)/ + external_id = context.tenant.external_id + metric = "realtime_tenants_broadcast_from_database_latency_committed_at_count" + metric_value = metric_value(metric, tenant: external_id) || 0 - metric_value = metric_value(pattern) FakeUserCounter.fake_broadcast_from_database(context.tenant.external_id) Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + assert metric_value(metric, tenant: external_id) == metric_value + 1 - bucket_pattern = - ~r/realtime_tenants_broadcast_from_database_latency_committed_at_bucket{tenant="#{context.tenant.external_id}",le="10"}\s(?\d+)/ - - assert metric_value(bucket_pattern) > 0 + assert metric_value("realtime_tenants_broadcast_from_database_latency_committed_at_bucket", + tenant: external_id, + le: "10.0" + ) > 0 end test "metric 
realtime_tenants_broadcast_from_database_latency_inserted_at exists after check", context do - pattern = - ~r/realtime_tenants_broadcast_from_database_latency_inserted_at_count{tenant="#{context.tenant.external_id}"}\s(?\d+)/ - - metric_value = metric_value(pattern) + external_id = context.tenant.external_id + metric = "realtime_tenants_broadcast_from_database_latency_inserted_at_count" + metric_value = metric_value(metric, tenant: external_id) || 0 FakeUserCounter.fake_broadcast_from_database(context.tenant.external_id) Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 - - bucket_pattern = - ~r/realtime_tenants_broadcast_from_database_latency_inserted_at_bucket{tenant="#{context.tenant.external_id}",le="5"}\s(?\d+)/ + assert metric_value(metric, tenant: external_id) == metric_value + 1 - assert metric_value(bucket_pattern) > 0 + assert metric_value("realtime_tenants_broadcast_from_database_latency_inserted_at_bucket", + tenant: external_id, + le: "10.0" + ) > 0 end test "tenant metric payload size", context do external_id = context.tenant.external_id - - pattern = - ~r/realtime_tenants_payload_size_count{tenant="#{external_id}"}\s(?\d+)/ - - metric_value = metric_value(pattern) + metric = "realtime_tenants_payload_size_count" + metric_value = metric_value(metric, message_type: "presence", tenant: external_id) || 0 message = %{topic: "a topic", event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} - RealtimeWeb.TenantBroadcaster.pubsub_broadcast(external_id, "a topic", message, Phoenix.PubSub) + RealtimeWeb.TenantBroadcaster.pubsub_broadcast(external_id, "a topic", message, Phoenix.PubSub, :presence) Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 - - bucket_pattern = - ~r/realtime_tenants_payload_size_bucket{tenant="#{external_id}",le="100"}\s(?\d+)/ + assert metric_value(metric, message_type: "presence", tenant: external_id) == metric_value + 1 - assert metric_value(bucket_pattern) > 0 + assert 
metric_value("realtime_tenants_payload_size_bucket", tenant: external_id, le: "250") > 0 end test "global metric payload size", context do external_id = context.tenant.external_id - pattern = ~r/realtime_payload_size_count\s(?\d+)/ - - metric_value = metric_value(pattern) + metric = "realtime_payload_size_count" + metric_value = metric_value(metric, message_type: "broadcast") || 0 message = %{topic: "a topic", event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} - RealtimeWeb.TenantBroadcaster.pubsub_broadcast(external_id, "a topic", message, Phoenix.PubSub) + RealtimeWeb.TenantBroadcaster.pubsub_broadcast(external_id, "a topic", message, Phoenix.PubSub, :broadcast) + + Process.sleep(200) + assert metric_value(metric, message_type: "broadcast") == metric_value + 1 + + assert metric_value("realtime_payload_size_bucket", le: "250.0") > 0 + end + + test "channel input bytes", context do + external_id = context.tenant.external_id + + FakeUserCounter.fake_input_bytes(external_id) + FakeUserCounter.fake_input_bytes(external_id) Process.sleep(200) - assert metric_value(pattern) == metric_value + 1 + assert metric_value("realtime_channel_input_bytes", tenant: external_id) == 20 + end - bucket_pattern = ~r/realtime_payload_size_bucket{le="100"}\s(?\d+)/ + test "channel output bytes", context do + external_id = context.tenant.external_id + + FakeUserCounter.fake_output_bytes(external_id) + FakeUserCounter.fake_output_bytes(external_id) - assert metric_value(bucket_pattern) > 0 + Process.sleep(200) + assert metric_value("realtime_channel_output_bytes", tenant: external_id) == 20 end end - defp metric_value(pattern) do - PromEx.get_metrics(MetricsTest) - |> String.split("\n", trim: true) - |> Enum.find_value( - "0", - fn item -> - case Regex.run(pattern, item, capture: ["number"]) do - [number] -> number - _ -> false - end - end - ) - |> String.to_integer() + defp metric_value(metric, expected_tags \\ nil) do + MetricsHelper.search(PromEx.get_metrics(MetricsTest), metric, 
expected_tags) end end diff --git a/test/realtime/monitoring/prom_ex/plugins/tenants_test.exs b/test/realtime/monitoring/prom_ex/plugins/tenants_test.exs index 080fd3cfb..4ebd99388 100644 --- a/test/realtime/monitoring/prom_ex/plugins/tenants_test.exs +++ b/test/realtime/monitoring/prom_ex/plugins/tenants_test.exs @@ -20,118 +20,107 @@ defmodule Realtime.PromEx.Plugins.TenantsTest do def exception, do: raise(RuntimeError) end - setup do - local_tenant = Containers.checkout_tenant(run_migrations: true) + setup_all do start_supervised!(MetricsTest) - {:ok, %{tenant: local_tenant}} + :ok end describe "event_metrics erpc" do - test "success" do - pattern = ~r/realtime_rpc_count{mechanism=\"erpc\",success="true",tenant="123"}\s(?\d+)/ + setup do + %{tenant: random_string()} + end + + test "global success", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) - assert {:ok, "success"} = Rpc.enhanced_call(node(), Test, :success, [], tenant_id: "123") + previous_value = metric_value(metric, mechanism: "erpc", success: true) || 0 + assert {:ok, "success"} = Rpc.enhanced_call(node(), Test, :success, [], tenant_id: tenant) Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "erpc", success: true) == previous_value + 1 end - test "failure" do - pattern = ~r/realtime_rpc_count{mechanism=\"erpc\",success="false",tenant="123"}\s(?\d+)/ + test "global failure", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) - assert {:error, "failure"} = Rpc.enhanced_call(node(), Test, :failure, [], tenant_id: "123") + previous_value = metric_value(metric, mechanism: "erpc", success: false) || 0 + assert {:error, "failure"} = Rpc.enhanced_call(node(), Test, :failure, [], 
tenant_id: tenant) Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "erpc", success: false) == previous_value + 1 end - test "exception" do - pattern = ~r/realtime_rpc_count{mechanism=\"erpc\",success="false",tenant="123"}\s(?\d+)/ + test "global exception", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) + previous_value = metric_value(metric, mechanism: "erpc", success: false) || 0 assert {:error, :rpc_error, %RuntimeError{message: "runtime error"}} = - Rpc.enhanced_call(node(), Test, :exception, [], tenant_id: "123") + Rpc.enhanced_call(node(), Test, :exception, [], tenant_id: tenant) Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "erpc", success: false) == previous_value + 1 end end - test "event_metrics rpc" do - pattern = ~r/realtime_rpc_count{mechanism=\"rpc\",success="",tenant="123"}\s(?\d+)/ - # Enough time for the poll rate to be triggered at least once - Process.sleep(200) - previous_value = metric_value(pattern) - assert {:ok, "success"} = Rpc.call(node(), Test, :success, [], tenant_id: "123") - Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 - end - describe "event_metrics gen_rpc" do - test "success" do - pattern = ~r/realtime_rpc_count{mechanism=\"gen_rpc\",success="true",tenant="123"}\s(?\d+)/ + setup do + %{tenant: random_string()} + end + + test "global success", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) - assert GenRpc.multicall(Test, :success, [], tenant_id: "123") == [{node(), {:ok, "success"}}] + previous_value = metric_value(metric, mechanism: "gen_rpc", success: true) || 0 + assert GenRpc.multicall(Test, :success, [], 
tenant_id: tenant) == [{node(), {:ok, "success"}}] Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "gen_rpc", success: true) == previous_value + 1 end - test "failure" do - pattern = ~r/realtime_rpc_count{mechanism=\"gen_rpc\",success="false",tenant="123"}\s(?\d+)/ + test "global failure", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) - assert GenRpc.multicall(Test, :failure, [], tenant_id: "123") == [{node(), {:error, "failure"}}] + previous_value = metric_value(metric, mechanism: "gen_rpc", success: false) || 0 + assert GenRpc.multicall(Test, :failure, [], tenant_id: tenant) == [{node(), {:error, "failure"}}] Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "gen_rpc", success: false) == previous_value + 1 end - test "exception" do - pattern = ~r/realtime_rpc_count{mechanism=\"gen_rpc\",success="false",tenant="123"}\s(?\d+)/ + test "global exception", %{tenant: tenant} do + metric = "realtime_global_rpc_count" # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) - + previous_value = metric_value(metric, mechanism: "gen_rpc", success: false) || 0 node = node() assert assert [{^node, {:error, :rpc_error, {:EXIT, {%RuntimeError{message: "runtime error"}, _stacktrace}}}}] = - GenRpc.multicall(Test, :exception, [], tenant_id: "123") + GenRpc.multicall(Test, :exception, [], tenant_id: tenant) Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value(metric, mechanism: "gen_rpc", success: false) == previous_value + 1 end end describe "pooling metrics" do + setup do + local_tenant = Containers.checkout_tenant(run_migrations: true) + {:ok, %{tenant: local_tenant}} + end + test "conneted based on Connect module 
information for local node only", %{tenant: tenant} do - pattern = ~r/realtime_tenants_connected\s(?\d+)/ # Enough time for the poll rate to be triggered at least once Process.sleep(200) - previous_value = metric_value(pattern) + previous_value = metric_value("realtime_tenants_connected") {:ok, _} = Connect.lookup_or_start_connection(tenant.external_id) Process.sleep(200) - assert metric_value(pattern) == previous_value + 1 + assert metric_value("realtime_tenants_connected") == previous_value + 1 end end - defp metric_value(pattern) do - PromEx.get_metrics(MetricsTest) - |> String.split("\n", trim: true) - |> Enum.find_value( - "0", - fn item -> - case Regex.run(pattern, item, capture: ["number"]) do - [number] -> number - _ -> false - end - end - ) - |> String.to_integer() + defp metric_value(metric, expected_tags \\ nil) do + MetricsHelper.search(PromEx.get_metrics(MetricsTest), metric, expected_tags) end end diff --git a/test/realtime/monitoring/prom_ex_test.exs b/test/realtime/monitoring/prom_ex_test.exs index 849536543..a466e5efd 100644 --- a/test/realtime/monitoring/prom_ex_test.exs +++ b/test/realtime/monitoring/prom_ex_test.exs @@ -5,7 +5,7 @@ defmodule Realtime.PromExTest do describe "get_metrics/0" do test "builds metrics in prometheus format which includes host region and id" do - metrics = PromEx.get_metrics() + metrics = PromEx.get_metrics() |> IO.iodata_to_binary() assert String.contains?( metrics, @@ -16,27 +16,7 @@ defmodule Realtime.PromExTest do assert String.contains?( metrics, - "beam_system_schedulers_online_info{host=\"nohost\",region=\"us-east-1\",id=\"nohost\"}" - ) - end - end - - describe "get_compressed_metrics/0" do - test "builds metrics compressed using zlib" do - compressed_metrics = PromEx.get_compressed_metrics() - - metrics = :zlib.uncompress(compressed_metrics) - - assert String.contains?( - metrics, - "# HELP beam_system_schedulers_online_info The number of scheduler threads that are online." 
- ) - - assert String.contains?(metrics, "# TYPE beam_system_schedulers_online_info gauge") - - assert String.contains?( - metrics, - "beam_system_schedulers_online_info{host=\"nohost\",region=\"us-east-1\",id=\"nohost\"}" + "beam_system_schedulers_online_info{host=\"nohost\",id=\"nohost\",region=\"us-east-1\"}" ) end end diff --git a/test/realtime/monitoring/prometheus_test.exs b/test/realtime/monitoring/prometheus_test.exs new file mode 100644 index 000000000..ca7563ce0 --- /dev/null +++ b/test/realtime/monitoring/prometheus_test.exs @@ -0,0 +1,434 @@ +# Based on https://github.com/rkallos/peep/blob/708546ed069aebdf78ac1f581130332bd2e8b5b1/test/prometheus_test.exs +defmodule Realtime.Monitoring.PrometheusTest do + use ExUnit.Case, async: true + + alias Realtime.Monitoring.Prometheus + alias Telemetry.Metrics + + defmodule StorageCounter do + @moduledoc false + use Agent + + def start() do + Agent.start(fn -> 0 end, name: __MODULE__) + end + + def fresh_id() do + Agent.get_and_update(__MODULE__, fn i -> {:"#{i}", i + 1} end) + end + end + + # Test struct that doesn't implement String.Chars + defmodule TestError do + defstruct [:reason, :code] + end + + setup_all do + StorageCounter.start() + :ok + end + + @impls [:default, {:default, 4}, :striped] + + for impl <- @impls do + test "#{inspect(impl)} - counter formatting" do + counter = Metrics.counter("prometheus.test.counter", description: "a counter") + name = StorageCounter.fresh_id() + + opts = [ + name: name, + metrics: [counter], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, counter, 1, %{foo: :bar, baz: "quux"}) + + expected = [ + "# HELP prometheus_test_counter a counter", + "# TYPE prometheus_test_counter counter", + ~s(prometheus_test_counter{baz="quux",foo="bar"} 1) + ] + + assert export(name) == lines_to_string(expected) + end + + describe "#{inspect(impl)} - sum" do + test "sum formatting" do + name = StorageCounter.fresh_id() + sum = 
Metrics.sum("prometheus.test.sum", description: "a sum") + + opts = [ + name: name, + metrics: [sum], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, sum, 5, %{foo: :bar, baz: "quux"}) + Peep.insert_metric(name, sum, 3, %{foo: :bar, baz: "quux"}) + + expected = [ + "# HELP prometheus_test_sum a sum", + "# TYPE prometheus_test_sum counter", + ~s(prometheus_test_sum{baz="quux",foo="bar"} 8) + ] + + assert export(name) == lines_to_string(expected) + end + + test "custom type" do + name = StorageCounter.fresh_id() + + sum = + Metrics.sum("prometheus.test.sum", + description: "a sum", + reporter_options: [prometheus_type: "gauge"] + ) + + opts = [ + name: name, + metrics: [sum], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, sum, 5, %{foo: :bar, baz: "quux"}) + Peep.insert_metric(name, sum, 3, %{foo: :bar, baz: "quux"}) + + expected = [ + "# HELP prometheus_test_sum a sum", + "# TYPE prometheus_test_sum gauge", + ~s(prometheus_test_sum{baz="quux",foo="bar"} 8) + ] + + assert export(name) == lines_to_string(expected) + end + end + + describe "#{inspect(impl)} - last_value" do + test "formatting" do + name = StorageCounter.fresh_id() + last_value = Metrics.last_value("prometheus.test.gauge", description: "a last_value") + + opts = [ + name: name, + metrics: [last_value], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, last_value, 5, %{blee: :bloo, flee: "floo"}) + + expected = [ + "# HELP prometheus_test_gauge a last_value", + "# TYPE prometheus_test_gauge gauge", + ~s(prometheus_test_gauge{blee="bloo",flee="floo"} 5) + ] + + assert export(name) == lines_to_string(expected) + end + + test "custom type" do + name = StorageCounter.fresh_id() + + last_value = + Metrics.last_value("prometheus.test.gauge", + description: "a last_value", + reporter_options: [prometheus_type: :sum] + ) + + opts = [ + name: name, + metrics: 
[last_value], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, last_value, 5, %{blee: :bloo, flee: "floo"}) + + expected = [ + "# HELP prometheus_test_gauge a last_value", + "# TYPE prometheus_test_gauge sum", + ~s(prometheus_test_gauge{blee="bloo",flee="floo"} 5) + ] + + assert export(name) == lines_to_string(expected) + end + end + + test "#{inspect(impl)} - dist formatting" do + name = StorageCounter.fresh_id() + + dist = + Metrics.distribution("prometheus.test.distribution", + description: "a distribution", + reporter_options: [max_value: 1000] + ) + + opts = [ + name: name, + metrics: [dist], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + expected = [] + assert export(name) == lines_to_string(expected) + + Peep.insert_metric(name, dist, 1, %{glee: :gloo}) + + expected = [ + "# HELP prometheus_test_distribution a distribution", + "# TYPE prometheus_test_distribution histogram", + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.222222"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.493827"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.825789"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="2.23152"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="2.727413"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="3.333505"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="4.074283"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="4.97968"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="6.086275"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="7.438781"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="9.091843"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="11.112253"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="13.581642"} 1), + 
~s(prometheus_test_distribution_bucket{glee="gloo",le="16.599785"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="20.288626"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="24.79721"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="30.307701"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="37.042745"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="45.274466"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="55.335459"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="67.632227"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="82.661611"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="101.030858"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="123.48216"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="150.92264"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="184.461004"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="225.452339"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="275.552858"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="336.786827"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="411.628344"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="503.101309"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="614.9016"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="751.5464"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="918.556711"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1122.680424"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="+Inf"} 1), + ~s(prometheus_test_distribution_sum{glee="gloo"} 1), + ~s(prometheus_test_distribution_count{glee="gloo"} 1) + ] + + assert export(name) == lines_to_string(expected) + + for i <- 2..2000 do + Peep.insert_metric(name, dist, i, %{glee: :gloo}) + end + + expected = [ + "# HELP prometheus_test_distribution a 
distribution", + "# TYPE prometheus_test_distribution histogram", + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.222222"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.493827"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.825789"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="2.23152"} 2), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="2.727413"} 2), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="3.333505"} 3), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="4.074283"} 4), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="4.97968"} 4), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="6.086275"} 6), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="7.438781"} 7), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="9.091843"} 9), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="11.112253"} 11), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="13.581642"} 13), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="16.599785"} 16), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="20.288626"} 20), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="24.79721"} 24), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="30.307701"} 30), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="37.042745"} 37), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="45.274466"} 45), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="55.335459"} 55), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="67.632227"} 67), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="82.661611"} 82), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="101.030858"} 101), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="123.48216"} 123), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="150.92264"} 150), + 
~s(prometheus_test_distribution_bucket{glee="gloo",le="184.461004"} 184), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="225.452339"} 225), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="275.552858"} 275), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="336.786827"} 336), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="411.628344"} 411), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="503.101309"} 503), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="614.9016"} 614), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="751.5464"} 751), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="918.556711"} 918), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1122.680424"} 1122), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="+Inf"} 2000), + ~s(prometheus_test_distribution_sum{glee="gloo"} 2001000), + ~s(prometheus_test_distribution_count{glee="gloo"} 2000) + ] + + assert export(name) == lines_to_string(expected) + end + + test "#{inspect(impl)} - dist formatting pow10" do + name = StorageCounter.fresh_id() + + dist = + Metrics.distribution("prometheus.test.distribution", + description: "a distribution", + reporter_options: [ + max_value: 1000, + peep_bucket_calculator: Peep.Buckets.PowersOfTen + ] + ) + + opts = [ + name: name, + metrics: [dist], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + expected = [] + assert export(name) == lines_to_string(expected) + + Peep.insert_metric(name, dist, 1, %{glee: :gloo}) + + expected = [ + "# HELP prometheus_test_distribution a distribution", + "# TYPE prometheus_test_distribution histogram", + ~s(prometheus_test_distribution_bucket{glee="gloo",le="10.0"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="100.0"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e3"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e4"} 1), + 
~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e5"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e6"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e7"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e8"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e9"} 1), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="+Inf"} 1), + ~s(prometheus_test_distribution_sum{glee="gloo"} 1), + ~s(prometheus_test_distribution_count{glee="gloo"} 1) + ] + + assert export(name) == lines_to_string(expected) + + f = fn -> + for i <- 1..2000 do + Peep.insert_metric(name, dist, i, %{glee: :gloo}) + end + end + + 1..20 |> Enum.map(fn _ -> Task.async(f) end) |> Task.await_many() + + expected = + [ + "# HELP prometheus_test_distribution a distribution", + "# TYPE prometheus_test_distribution histogram", + ~s(prometheus_test_distribution_bucket{glee="gloo",le="10.0"} 181), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="100.0"} 1981), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e3"} 19981), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e4"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e5"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e6"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e7"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e8"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="1.0e9"} 40001), + ~s(prometheus_test_distribution_bucket{glee="gloo",le="+Inf"} 40001), + ~s(prometheus_test_distribution_sum{glee="gloo"} 40020001), + ~s(prometheus_test_distribution_count{glee="gloo"} 40001) + ] + + assert export(name) == lines_to_string(expected) + end + + test "#{inspect(impl)} - regression: label escaping" do + name = StorageCounter.fresh_id() + + counter = + Metrics.counter( + "prometheus.test.counter", + description: "a counter" + ) + + opts = [ + name: name, 
+ metrics: [counter], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + Peep.insert_metric(name, counter, 1, %{atom: "\"string\""}) + Peep.insert_metric(name, counter, 1, %{"\"string\"" => :atom}) + Peep.insert_metric(name, counter, 1, %{"\"string\"" => "\"string\""}) + Peep.insert_metric(name, counter, 1, %{"string" => "string\n"}) + + expected = [ + "# HELP prometheus_test_counter a counter", + "# TYPE prometheus_test_counter counter", + ~s(prometheus_test_counter{atom="\\\"string\\\""} 1), + ~s(prometheus_test_counter{\"string\"="atom"} 1), + ~s(prometheus_test_counter{\"string\"="\\\"string\\\""} 1), + ~s(prometheus_test_counter{string="string\\n"} 1) + ] + + assert export(name) == lines_to_string(expected) + end + + test "#{inspect(impl)} - regression: handle structs without String.Chars" do + name = StorageCounter.fresh_id() + + counter = + Metrics.counter( + "prometheus.test.counter", + description: "a counter" + ) + + opts = [ + name: name, + metrics: [counter], + storage: unquote(impl) + ] + + {:ok, _pid} = Peep.start_link(opts) + + # Create a struct that doesn't implement String.Chars + error_struct = %TestError{reason: :tcp_closed, code: 1001} + + Peep.insert_metric(name, counter, 1, %{error: error_struct}) + + result = export(name) + + # Should not crash and should contain the inspected struct representation + assert result =~ "prometheus_test_counter" + assert result =~ "TestError" + assert result =~ "tcp_closed" + end + end + + defp export(name) do + Peep.get_all_metrics(name) + |> Prometheus.export() + |> IO.iodata_to_binary() + end + + defp lines_to_string(lines) do + lines + |> Enum.map(&[&1, ?\n]) + |> Enum.concat(["# EOF\n"]) + |> IO.iodata_to_binary() + end +end diff --git a/test/realtime/nodes_test.exs b/test/realtime/nodes_test.exs index ba3b6be0e..b127ed605 100644 --- a/test/realtime/nodes_test.exs +++ b/test/realtime/nodes_test.exs @@ -4,6 +4,78 @@ defmodule Realtime.NodesTest do alias Realtime.Nodes alias 
Realtime.Tenants + defp spawn_fake_node(region, node) do + parent = self() + + fun = fn -> + :syn.join(RegionNodes, region, self(), node: node) + send(parent, :joined) + + receive do + :ok -> :ok + end + end + + {:ok, _pid} = start_supervised({Task, fun}, id: {region, node}) + assert_receive :joined + end + + describe "all_node_regions/0" do + test "returns all regions with nodes" do + spawn_fake_node("us-east-1", :node_1) + spawn_fake_node("ap-2", :node_2) + spawn_fake_node("ap-2", :node_3) + + assert Nodes.all_node_regions() |> Enum.sort() == ["ap-2", "us-east-1"] + end + + test "with no other nodes, returns my region only" do + assert Nodes.all_node_regions() == ["us-east-1"] + end + end + + describe "region_nodes/1" do + test "nil region returns empty list" do + assert Nodes.region_nodes(nil) == [] + end + + test "returns nodes from region" do + region = "ap-southeast-2" + spawn_fake_node(region, :node_1) + spawn_fake_node(region, :node_2) + + spawn_fake_node("eu-west-2", :node_3) + + assert Nodes.region_nodes(region) == [:node_1, :node_2] + assert Nodes.region_nodes("eu-west-2") == [:node_3] + end + + test "on non-existing region, returns empty list" do + assert Nodes.region_nodes("non-existing-region") == [] + end + end + + describe "node_from_region/2" do + test "nil region returns error" do + assert {:error, :not_available} = Nodes.node_from_region(nil, :any_key) + end + + test "empty region returns error" do + assert {:error, :not_available} = Nodes.node_from_region("empty-region", :any_key) + end + + test "returns the same node given the same key" do + region = "ap-southeast-3" + spawn_fake_node(region, :node_1) + spawn_fake_node(region, :node_2) + + spawn_fake_node("eu-west-3", :node_3) + + assert {:ok, :node_2} = Nodes.node_from_region(region, :key1) + assert {:ok, :node_2} = Nodes.node_from_region(region, :key1) + end + end + describe "get_node_for_tenant/1" do setup do tenant = Containers.checkout_tenant() @@ -16,10 +88,7 @@ defmodule 
Realtime.NodesTest do reject(&:syn.members/2) end - test "on existing tenant id, returns the node for the region using syn", %{ - tenant: tenant, - region: region - } do + test "on existing tenant id, returns the node for the region using syn", %{tenant: tenant, region: region} do expected_nodes = [:tenant@closest1, :tenant@closest2] expect(:syn, :members, fn RegionNodes, ^region -> @@ -39,7 +108,7 @@ defmodule Realtime.NodesTest do assert region == expected_region end - test "on existing tenant id, and a single node for a given region, returns default", %{ + test "on existing tenant id, and a single node for a given region, returns single node", %{ tenant: tenant, region: region } do @@ -48,7 +117,7 @@ defmodule Realtime.NodesTest do expected_region = Tenants.region(tenant) - assert node == node() + assert node != node() assert region == expected_region end diff --git a/test/realtime/postgres_decoder_test.exs b/test/realtime/postgres_decoder_test.exs index 9516e5e9a..bd9a0c579 100644 --- a/test/realtime/postgres_decoder_test.exs +++ b/test/realtime/postgres_decoder_test.exs @@ -2,24 +2,23 @@ defmodule Realtime.PostgresDecoderTest do use ExUnit.Case, async: true alias Realtime.Adapters.Postgres.Decoder - alias Decoder.Messages.{ - Begin, - Commit, - Origin, - Relation, - Relation.Column, - Insert, - Update, - Delete, - Truncate, - Type - } + alias Decoder.Messages.Begin + alias Decoder.Messages.Commit + alias Decoder.Messages.Insert + alias Decoder.Messages.Origin + alias Decoder.Messages.Relation + alias Decoder.Messages.Relation.Column + alias Decoder.Messages.Type + alias Decoder.Messages.Unsupported test "decodes begin messages" do {:ok, expected_dt_no_microseconds, 0} = DateTime.from_iso8601("2019-07-18T17:02:35Z") expected_dt = DateTime.add(expected_dt_no_microseconds, 726_322, :microsecond) - assert Decoder.decode_message(<<66, 0, 0, 0, 2, 167, 244, 168, 128, 0, 2, 48, 246, 88, 88, 213, 242, 0, 0, 2, 107>>) == + assert Decoder.decode_message( + <<66, 0, 0, 
0, 2, 167, 244, 168, 128, 0, 2, 48, 246, 88, 88, 213, 242, 0, 0, 2, 107>>, + %{} + ) == %Begin{commit_timestamp: expected_dt, final_lsn: {2, 2_817_828_992}, xid: 619} end @@ -28,7 +27,8 @@ defmodule Realtime.PostgresDecoderTest do expected_dt = DateTime.add(expected_dt_no_microseconds, 726_322, :microsecond) assert Decoder.decode_message( - <<67, 0, 0, 0, 0, 2, 167, 244, 168, 128, 0, 0, 0, 2, 167, 244, 168, 176, 0, 2, 48, 246, 88, 88, 213, 242>> + <<67, 0, 0, 0, 0, 2, 167, 244, 168, 128, 0, 0, 0, 2, 167, 244, 168, 176, 0, 2, 48, 246, 88, 88, 213, 242>>, + %{} ) == %Commit{ flags: [], lsn: {2, 2_817_828_992}, @@ -38,7 +38,7 @@ defmodule Realtime.PostgresDecoderTest do end test "decodes origin messages" do - assert Decoder.decode_message(<<79, 0, 0, 0, 2, 167, 244, 168, 128>> <> "Elmer Fud") == + assert Decoder.decode_message(<<79, 0, 0, 0, 2, 167, 244, 168, 128>> <> "Elmer Fud", %{}) == %Origin{ origin_commit_lsn: {2, 2_817_828_992}, name: "Elmer Fud" @@ -48,7 +48,8 @@ defmodule Realtime.PostgresDecoderTest do test "decodes relation messages" do assert Decoder.decode_message( <<82, 0, 0, 96, 0, 112, 117, 98, 108, 105, 99, 0, 102, 111, 111, 0, 100, 0, 2, 0, 98, 97, 114, 0, 0, 0, 0, - 25, 255, 255, 255, 255, 1, 105, 100, 0, 0, 0, 0, 23, 255, 255, 255, 255>> + 25, 255, 255, 255, 255, 1, 105, 100, 0, 0, 0, 0, 23, 255, 255, 255, 255>>, + %{} ) == %Relation{ id: 24_576, namespace: "public", @@ -74,7 +75,8 @@ defmodule Realtime.PostgresDecoderTest do test "decodes type messages" do assert Decoder.decode_message( <<89, 0, 0, 128, 52, 112, 117, 98, 108, 105, 99, 0, 101, 120, 97, 109, 112, 108, 101, 95, 116, 121, 112, - 101, 0>> + 101, 0>>, + %{} ) == %Type{ id: 32_820, @@ -83,110 +85,103 @@ defmodule Realtime.PostgresDecoderTest do } end - describe "truncate messages" do - test "decodes messages" do - assert Decoder.decode_message(<<84, 0, 0, 0, 1, 0, 0, 0, 96, 0>>) == - %Truncate{ - number_of_relations: 1, - options: [], - truncated_relations: [24_576] - } - end - - test 
"decodes messages with cascade option" do - assert Decoder.decode_message(<<84, 0, 0, 0, 1, 1, 0, 0, 96, 0>>) == - %Truncate{ - number_of_relations: 1, - options: [:cascade], - truncated_relations: [24_576] - } - end - - test "decodes messages with restart identity option" do - assert Decoder.decode_message(<<84, 0, 0, 0, 1, 2, 0, 0, 96, 0>>) == - %Truncate{ - number_of_relations: 1, - options: [:restart_identity], - truncated_relations: [24_576] - } - end - end - describe "data message (TupleData) decoder" do - test "decodes insert messages" do - assert Decoder.decode_message( - <<73, 0, 0, 96, 0, 78, 0, 2, 116, 0, 0, 0, 3, 98, 97, 122, 116, 0, 0, 0, 3, 53, 54, 48>> - ) == %Insert{ - relation_id: 24_576, - tuple_data: {"baz", "560"} - } + setup do + relation = %{ + id: 24_576, + namespace: "public", + name: "foo", + columns: [ + %Column{name: "id", type: "uuid"}, + %Column{name: "bar", type: "text"} + ] + } + + %{relation: relation} end - test "decodes insert messages with null values" do - assert Decoder.decode_message(<<73, 0, 0, 96, 0, 78, 0, 2, 110, 116, 0, 0, 0, 3, 53, 54, 48>>) == %Insert{ - relation_id: 24_576, - tuple_data: {nil, "560"} - } - end + test "decodes insert messages", %{relation: relation} do + uuid = UUID.uuid4() + string = Generators.random_string() + + data = + "I" <> + <> <> + "N" <> + <<2::integer-16>> <> + "b" <> + <<16::integer-32>> <> + UUID.string_to_binary!(uuid) <> + "b" <> + <> <> + string - test "decodes insert messages with unchanged toasted values" do - assert Decoder.decode_message(<<73, 0, 0, 96, 0, 78, 0, 2, 117, 116, 0, 0, 0, 3, 53, 54, 48>>) == %Insert{ - relation_id: 24_576, - tuple_data: {:unchanged_toast, "560"} - } - end - - test "decodes update messages with default replica identity setting" do assert Decoder.decode_message( - <<85, 0, 0, 96, 0, 78, 0, 2, 116, 0, 0, 0, 7, 101, 120, 97, 109, 112, 108, 101, 116, 0, 0, 0, 3, 53, 54, - 48>> - ) == %Update{ - relation_id: 24_576, - changed_key_tuple_data: nil, - 
old_tuple_data: nil, - tuple_data: {"example", "560"} + data, + %{relation.id => relation} + ) == %Insert{ + relation_id: relation.id, + tuple_data: {uuid, string} } end - test "decodes update messages with FULL replica identity setting" do - assert Decoder.decode_message( - <<85, 0, 0, 96, 0, 79, 0, 2, 116, 0, 0, 0, 3, 98, 97, 122, 116, 0, 0, 0, 3, 53, 54, 48, 78, 0, 2, 116, 0, - 0, 0, 7, 101, 120, 97, 109, 112, 108, 101, 116, 0, 0, 0, 3, 53, 54, 48>> - ) == %Update{ - relation_id: 24_576, - changed_key_tuple_data: nil, - old_tuple_data: {"baz", "560"}, - tuple_data: {"example", "560"} - } - end + test "ignores unknown relations", %{relation: relation} do + uuid = UUID.uuid4() + string = Generators.random_string() + + data = + "I" <> + <<679::integer-32>> <> + "N" <> + <<2::integer-16>> <> + "b" <> + <<16::integer-32>> <> + UUID.string_to_binary!(uuid) <> + "b" <> + <> <> + string - test "decodes update messages with USING INDEX replica identity setting" do assert Decoder.decode_message( - <<85, 0, 0, 96, 0, 75, 0, 2, 116, 0, 0, 0, 3, 98, 97, 122, 110, 78, 0, 2, 116, 0, 0, 0, 7, 101, 120, 97, - 109, 112, 108, 101, 116, 0, 0, 0, 3, 53, 54, 48>> - ) == %Update{ - relation_id: 24_576, - changed_key_tuple_data: {"baz", nil}, - old_tuple_data: nil, - tuple_data: {"example", "560"} - } + data, + %{relation.id => relation} + ) == %Unsupported{} end - test "decodes DELETE messages with USING INDEX replica identity setting" do - assert Decoder.decode_message( - <<68, 0, 0, 96, 0, 75, 0, 2, 116, 0, 0, 0, 7, 101, 120, 97, 109, 112, 108, 101, 110>> - ) == %Delete{ - relation_id: 24_576, - changed_key_tuple_data: {"example", nil} + test "decodes insert messages with null values", %{relation: relation} do + string = Generators.random_string() + + data = + "I" <> + <> <> + "N" <> + <<2::integer-16>> <> + "n" <> + "b" <> + <> <> + string + + assert Decoder.decode_message(data, %{relation.id => relation}) == %Insert{ + relation_id: relation.id, + tuple_data: {nil, string} } end - 
test "decodes DELETE messages with FULL replica identity setting" do - assert Decoder.decode_message( - <<68, 0, 0, 96, 0, 79, 0, 2, 116, 0, 0, 0, 3, 98, 97, 122, 116, 0, 0, 0, 3, 53, 54, 48>> - ) == %Delete{ - relation_id: 24_576, - old_tuple_data: {"baz", "560"} + test "decodes insert messages with unchanged toasted values", %{relation: relation} do + string = Generators.random_string() + + data = + "I" <> + <> <> + "N" <> + <<2::integer-16>> <> + "u" <> + "b" <> + <> <> + string + + assert Decoder.decode_message(data, %{relation.id => relation}) == %Insert{ + relation_id: relation.id, + tuple_data: {:unchanged_toast, string} } end end diff --git a/test/realtime/rate_counter/rate_counter_test.exs b/test/realtime/rate_counter/rate_counter_test.exs index 6d3f57401..1c3d8af07 100644 --- a/test/realtime/rate_counter/rate_counter_test.exs +++ b/test/realtime/rate_counter/rate_counter_test.exs @@ -22,7 +22,7 @@ defmodule Realtime.RateCounterTest do max_bucket_len: 60, tick: 1000, tick_ref: _, - idle_shutdown: 900_000, + idle_shutdown: 600_000, idle_shutdown_ref: _, telemetry: %{emit: false}, limit: %{log: false} @@ -62,7 +62,7 @@ defmodule Realtime.RateCounterTest do max_bucket_len: 60, tick: 10, tick_ref: _, - idle_shutdown: 900_000, + idle_shutdown: 600_000, idle_shutdown_ref: _, telemetry: %{ emit: true, @@ -197,7 +197,7 @@ defmodule Realtime.RateCounterTest do id: id, opts: [ tick: 100, - max_bucket_len: 3, + max_bucket_len: 5, limit: [ value: 49, measurement: :sum, @@ -215,7 +215,7 @@ defmodule Realtime.RateCounterTest do avg: +0.0, sum: 0, bucket: _, - max_bucket_len: 3, + max_bucket_len: 5, telemetry: %{emit: false}, limit: %{ log: true, @@ -228,7 +228,7 @@ defmodule Realtime.RateCounterTest do log = capture_log(fn -> GenCounter.add(args.id, 100) - Process.sleep(100) + Process.sleep(120) end) assert {:ok, %RateCounter{sum: sum, limit: %{triggered: true}}} = RateCounter.get(args) @@ -239,7 +239,7 @@ defmodule Realtime.RateCounterTest do # Splitting by the error 
message returns the error message and the rest of the log only assert length(String.split(log, "ErrorMessage: Reason")) == 2 - Process.sleep(400) + Process.sleep(600) assert {:ok, %RateCounter{sum: 0, limit: %{triggered: false}}} = RateCounter.get(args) end @@ -260,10 +260,10 @@ defmodule Realtime.RateCounterTest do test "rate counters shut themselves down when no activity occurs on the GenCounter" do args = %Args{id: {:domain, :metric, Ecto.UUID.generate()}} - {:ok, pid} = RateCounter.new(args, idle_shutdown: 5) + {:ok, pid} = RateCounter.new(args, idle_shutdown: 100) Process.monitor(pid) - assert_receive {:DOWN, _ref, :process, ^pid, :normal}, 25 + assert_receive {:DOWN, _ref, :process, ^pid, :normal}, 200 # Cache has not expired yet assert {:ok, %RateCounter{}} = Cachex.get(RateCounter, args.id) Process.sleep(2000) @@ -316,37 +316,5 @@ defmodule Realtime.RateCounterTest do end end - describe "stop/1" do - test "stops rate counters for a given entity" do - entity_id = Ecto.UUID.generate() - fake_terms = Enum.map(1..10, fn _ -> {:domain, :"metric_#{random_string()}", Ecto.UUID.generate()} end) - terms = Enum.map(1..10, fn _ -> {:domain, :"metric_#{random_string()}", entity_id} end) - - for term <- terms do - args = %Args{id: term} - {:ok, _} = RateCounter.new(args) - assert {:ok, %RateCounter{}} = RateCounter.get(args) - end - - for term <- fake_terms do - args = %Args{id: term} - {:ok, _} = RateCounter.new(args) - assert {:ok, %RateCounter{}} = RateCounter.get(args) - end - - assert :ok = RateCounter.stop(entity_id) - # Wait for processes to shut down and Registry to update - Process.sleep(100) - - for term <- terms do - assert [] = Registry.lookup(Realtime.Registry.Unique, {RateCounter, :rate_counter, term}) - end - - for term <- fake_terms do - assert [{_pid, _value}] = Registry.lookup(Realtime.Registry.Unique, {RateCounter, :rate_counter, term}) - end - end - end - def handle_telemetry(event, measures, metadata, pid: pid), do: send(pid, {event, measures, 
metadata}) end diff --git a/test/realtime/repo_replica_test.exs b/test/realtime/repo_replica_test.exs index a3734d31b..0b988205b 100644 --- a/test/realtime/repo_replica_test.exs +++ b/test/realtime/repo_replica_test.exs @@ -1,14 +1,17 @@ defmodule Realtime.Repo.ReplicaTest do - use ExUnit.Case + # application env being changed + use ExUnit.Case, async: false alias Realtime.Repo.Replica setup do previous_platform = Application.get_env(:realtime, :platform) previous_region = Application.get_env(:realtime, :region) + previous_master_region = Application.get_env(:realtime, :master_region) on_exit(fn -> Application.put_env(:realtime, :platform, previous_platform) Application.put_env(:realtime, :region, previous_region) + Application.put_env(:realtime, :master_region, previous_master_region) end) end @@ -16,12 +19,20 @@ defmodule Realtime.Repo.ReplicaTest do for {region, mod} <- Replica.replicas_aws() do setup do Application.put_env(:realtime, :platform, :aws) + Application.put_env(:realtime, :master_region, "special-region") + :ok end test "handles #{region} region" do Application.put_env(:realtime, :region, unquote(region)) replica_asserts(unquote(mod), Replica.replica()) end + + test "defaults to Realtime.Repo if region is equal to master region on #{region}" do + Application.put_env(:realtime, :region, unquote(region)) + Application.put_env(:realtime, :master_region, unquote(region)) + replica_asserts(Realtime.Repo, Replica.replica()) + end end test "defaults to Realtime.Repo if region is not configured" do @@ -34,6 +45,8 @@ defmodule Realtime.Repo.ReplicaTest do for {region, mod} <- Replica.replicas_fly() do setup do Application.put_env(:realtime, :platform, :fly) + Application.put_env(:realtime, :master_region, "special-region") + :ok end test "handles #{region} region" do diff --git a/test/realtime/rpc_test.exs b/test/realtime/rpc_test.exs index 221cd781b..9c83d7064 100644 --- a/test/realtime/rpc_test.exs +++ b/test/realtime/rpc_test.exs @@ -81,8 +81,7 @@ 
defmodule Realtime.RpcTest do func: :test_success, origin_node: ^origin_node, target_node: ^node, - success: true, - tenant: "123" + success: true }} end @@ -100,8 +99,7 @@ defmodule Realtime.RpcTest do func: :test_raise, origin_node: ^origin_node, target_node: ^node, - success: false, - tenant: "123" + success: false }} end diff --git a/test/realtime/syn_handler_test.exs b/test/realtime/syn_handler_test.exs index 2b27cf322..96a2e316a 100644 --- a/test/realtime/syn_handler_test.exs +++ b/test/realtime/syn_handler_test.exs @@ -13,8 +13,15 @@ defmodule Realtime.SynHandlerTest do defmodule FakeConnect do use GenServer + def start_link([tenant_id, region, opts]) do + name = {Connect, tenant_id, %{conn: nil, region: region}} + gen_opts = [name: {:via, :syn, name}] + GenServer.start_link(FakeConnect, [tenant_id, opts], gen_opts) + end + def init([tenant_id, opts]) do - :syn.update_registry(Connect, tenant_id, fn _pid, meta -> %{meta | conn: "fake_conn"} end) + conn = Keyword.get(opts, :conn, "remote_conn") + :syn.update_registry(Connect, tenant_id, fn _pid, meta -> %{meta | conn: conn} end) if opts[:trap_exit], do: Process.flag(:trap_exit, true) @@ -28,125 +35,184 @@ defmodule Realtime.SynHandlerTest do Code.eval_quoted(@aux_mod) - defp assert_process_down(pid, reason \\ nil, timeout \\ 100) do - ref = Process.monitor(pid) + # > :"main@127.0.0.11" < :"atest@127.0.0.1" + # false + # iex(2)> :erlang.phash2("tenant123", 2) + # 0 + # iex(3)> :erlang.phash2("tenant999", 2) + # 1 + describe "integration test with a Connect conflict name=atest" do + setup do + {:ok, pid, node} = + Clustered.start_disconnected(@aux_mod, name: :atest, extra_config: [{:realtime, :region, "ap-southeast-2"}]) - if reason do - assert_receive {:DOWN, ^ref, :process, ^pid, ^reason}, timeout - else - assert_receive {:DOWN, ^ref, :process, ^pid, _reason}, timeout + %{peer_pid: pid, node: node} + end + + @tag tenant_id: "tenant999" + test "tenant hash = 1", %{node: node, peer_pid: peer_pid, tenant_id: 
tenant_id} do + assert :erlang.phash2(tenant_id, 2) == 1 + local_pid = start_supervised!({FakeConnect, [tenant_id, "us-east-1", [conn: "local_conn"]]}) + {:ok, remote_pid} = :peer.call(peer_pid, FakeConnect, :start_link, [[tenant_id, "ap-southeast-2", []]]) + on_exit(fn -> Process.exit(remote_pid, :brutal_kill) end) + + log = + capture_log(fn -> + # Connect to peer node to cause a conflict on syn + true = Node.connect(node) + # Give some time for the conflict resolution to happen on the other node + Process.sleep(500) + + # Both nodes agree + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = + :peer.call(peer_pid, :syn, :lookup, [Connect, tenant_id]) + + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = :syn.lookup(Connect, tenant_id) + + assert :peer.call(peer_pid, Process, :alive?, [remote_pid]) + + refute Process.alive?(local_pid) + end) + + assert log =~ "stop local process: #{inspect(local_pid)}" + assert log =~ "Successfully stopped #{inspect(local_pid)}" + + assert log =~ + "Elixir.Realtime.Tenants.Connect terminated due to syn conflict resolution: \"#{tenant_id}\" #{inspect(local_pid)}" + end + + @tag tenant_id: "tenant123" + test "tenant hash = 0", %{node: node, peer_pid: peer_pid, tenant_id: tenant_id} do + assert :erlang.phash2(tenant_id, 2) == 0 + {:ok, remote_pid} = :peer.call(peer_pid, FakeConnect, :start_link, [[tenant_id, "ap-southeast-2", []]]) + local_pid = start_supervised!({FakeConnect, [tenant_id, "us-east-1", [conn: "local_conn"]]}) + on_exit(fn -> Process.exit(remote_pid, :kill) end) + + log = + capture_log(fn -> + # Connect to peer node to cause a conflict on syn + true = Node.connect(node) + # Give some time for the conflict resolution to happen on the other node + Process.sleep(500) + + # Both nodes agree + assert {^local_pid, %{region: "us-east-1", conn: "local_conn"}} = :syn.lookup(Connect, tenant_id) + + assert {^local_pid, %{region: "us-east-1", conn: "local_conn"}} = + :peer.call(peer_pid, 
:syn, :lookup, [Connect, tenant_id]) + + refute :peer.call(peer_pid, Process, :alive?, [remote_pid]) + + assert Process.alive?(local_pid) + end) + + assert log =~ "remote process will be stopped: #{inspect(remote_pid)}" end end - describe "integration test with a Connect conflict" do + # > :"main@127.0.0.11" < :"test@127.0.0.1" + # true + # iex(2)> :erlang.phash2("tenant123", 2) + # 0 + # iex(3)> :erlang.phash2("tenant999", 2) + # 1 + describe "integration test with a Connect conflict name=test" do setup do - ensure_connect_down("dev_tenant") - {:ok, pid, node} = Clustered.start_disconnected(@aux_mod, extra_config: [{:realtime, :region, "ap-southeast-2"}]) - Endpoint.subscribe("connect:dev_tenant") + {:ok, pid, node} = + Clustered.start_disconnected(@aux_mod, name: :test, extra_config: [{:realtime, :region, "ap-southeast-2"}]) + %{peer_pid: pid, node: node} end - test "local node started first", %{node: node, peer_pid: peer_pid} do - external_id = "dev_tenant" - # start connect locally first - {:ok, db_conn} = Connect.lookup_or_start_connection(external_id) - assert Connect.ready?(external_id) - connect = Connect.whereis(external_id) - assert node(connect) == node() - - # Now let's force the remote node to start the fake Connect process - name = {Connect, external_id, %{conn: nil, region: "ap-southeast-2"}} - opts = [name: {:via, :syn, name}] - {:ok, remote_pid} = :peer.call(peer_pid, GenServer, :start_link, [FakeConnect, [external_id, []], opts]) + @tag tenant_id: "tenant999" + test "tenant hash = 1", %{node: node, peer_pid: peer_pid, tenant_id: tenant_id} do + assert :erlang.phash2(tenant_id, 2) == 1 + Endpoint.subscribe("connect:#{tenant_id}") + local_pid = start_supervised!({FakeConnect, [tenant_id, "us-east-1", [conn: "local_conn"]]}) + + {:ok, remote_pid} = :peer.call(peer_pid, FakeConnect, :start_link, [[tenant_id, "ap-southeast-2", []]]) + on_exit(fn -> Process.exit(remote_pid, :brutal_kill) end) log = capture_log(fn -> - 
Endpoint.subscribe("connect:dev_tenant") # Connect to peer node to cause a conflict on syn true = Node.connect(node) # Give some time for the conflict resolution to happen on the other node Process.sleep(500) # Both nodes agree - assert {^connect, %{region: "us-east-1", conn: ^db_conn}} = :syn.lookup(Connect, external_id) + assert {^local_pid, %{region: "us-east-1", conn: "local_conn"}} = :syn.lookup(Connect, tenant_id) - assert {^connect, %{region: "us-east-1", conn: ^db_conn}} = - :peer.call(peer_pid, :syn, :lookup, [Connect, external_id]) + assert {^local_pid, %{region: "us-east-1", conn: "local_conn"}} = + :peer.call(peer_pid, :syn, :lookup, [Connect, tenant_id]) refute :peer.call(peer_pid, Process, :alive?, [remote_pid]) - assert Process.alive?(connect) + assert Process.alive?(local_pid) end) assert log =~ "remote process will be stopped: #{inspect(remote_pid)}" end - test "remote node started first", %{node: node, peer_pid: peer_pid} do - external_id = "dev_tenant" + @tag tenant_id: "tenant123" + test "tenant hash = 0", %{node: node, peer_pid: peer_pid, tenant_id: tenant_id} do + assert :erlang.phash2(tenant_id, 2) == 0 # Start remote process first - name = {Connect, external_id, %{conn: nil, region: "ap-southeast-2"}} - opts = [name: {:via, :syn, name}] - {:ok, remote_pid} = :peer.call(peer_pid, GenServer, :start_link, [FakeConnect, [external_id, []], opts]) + {:ok, remote_pid} = :peer.call(peer_pid, FakeConnect, :start_link, [[tenant_id, "ap-southeast-2", []]]) + on_exit(fn -> Process.exit(remote_pid, :kill) end) # start connect locally later - {:ok, _db_conn} = Connect.lookup_or_start_connection(external_id) - assert Connect.ready?(external_id) - connect = Connect.whereis(external_id) - assert node(connect) == node() + local_pid = start_supervised!({FakeConnect, [tenant_id, "us-east-1", [conn: "local_conn"]]}) log = capture_log(fn -> # Connect to peer node to cause a conflict on syn true = Node.connect(node) - assert_process_down(connect) - assert_receive 
%{event: "connect_down"} + # Give some time for the conflict resolution to happen on the other node + Process.sleep(500) # Both nodes agree - assert {^remote_pid, %{region: "ap-southeast-2", conn: "fake_conn"}} = - :peer.call(peer_pid, :syn, :lookup, [Connect, external_id]) + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = + :peer.call(peer_pid, :syn, :lookup, [Connect, tenant_id]) - assert {^remote_pid, %{region: "ap-southeast-2", conn: "fake_conn"}} = :syn.lookup(Connect, external_id) + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = :syn.lookup(Connect, tenant_id) assert :peer.call(peer_pid, Process, :alive?, [remote_pid]) - refute Process.alive?(connect) + refute Process.alive?(local_pid) end) - assert log =~ "stop local process: #{inspect(connect)}" - assert log =~ "Successfully stopped #{inspect(connect)}" + assert log =~ "stop local process: #{inspect(local_pid)}" + assert log =~ "Successfully stopped #{inspect(local_pid)}" assert log =~ - "Elixir.Realtime.Tenants.Connect terminated due to syn conflict resolution: \"dev_tenant\" #{inspect(connect)}" + "Elixir.Realtime.Tenants.Connect terminated due to syn conflict resolution: \"#{tenant_id}\" #{inspect(local_pid)}" end - test "remote node started first but timed out stopping", %{node: node, peer_pid: peer_pid} do - external_id = "dev_tenant" + @tag tenant_id: "tenant123" + test "tenant hash = 0 but timed out stopping", %{node: node, peer_pid: peer_pid, tenant_id: tenant_id} do + assert :erlang.phash2(tenant_id, 2) == 0 # Start remote process first - name = {Connect, external_id, %{conn: nil, region: "ap-southeast-2"}} - opts = [name: {:via, :syn, name}] - {:ok, remote_pid} = :peer.call(peer_pid, GenServer, :start_link, [FakeConnect, [external_id, []], opts]) - on_exit(fn -> Process.exit(remote_pid, :brutal_kill) end) + {:ok, remote_pid} = :peer.call(peer_pid, FakeConnect, :start_link, [[tenant_id, "ap-southeast-2", []]]) - {:ok, local_pid} = - 
start_supervised(%{ - id: self(), - start: {GenServer, :start_link, [FakeConnect, [external_id, [trap_exit: true]], opts]} - }) + on_exit(fn -> Process.exit(remote_pid, :kill) end) + + # start connect locally later + local_pid = start_supervised!({FakeConnect, [tenant_id, "us-east-1", [conn: "local_conn", trap_exit: true]]}) log = capture_log(fn -> # Connect to peer node to cause a conflict on syn true = Node.connect(node) assert_process_down(local_pid, :killed, 6000) - assert_receive %{event: "connect_down"} # Both nodes agree - assert {^remote_pid, %{region: "ap-southeast-2", conn: "fake_conn"}} = - :peer.call(peer_pid, :syn, :lookup, [Connect, external_id]) + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = + :peer.call(peer_pid, :syn, :lookup, [Connect, tenant_id]) - assert {^remote_pid, %{region: "ap-southeast-2", conn: "fake_conn"}} = :syn.lookup(Connect, external_id) + assert {^remote_pid, %{region: "ap-southeast-2", conn: "remote_conn"}} = :syn.lookup(Connect, tenant_id) assert :peer.call(peer_pid, Process, :alive?, [remote_pid]) @@ -157,7 +223,7 @@ defmodule Realtime.SynHandlerTest do assert log =~ "Timed out while waiting for process #{inspect(local_pid)} to stop. 
Sending kill exit signal" assert log =~ - "Elixir.Realtime.Tenants.Connect terminated due to syn conflict resolution: \"dev_tenant\" #{inspect(local_pid)}" + "Elixir.Realtime.Tenants.Connect terminated due to syn conflict resolution: \"#{tenant_id}\" #{inspect(local_pid)}" end end @@ -168,32 +234,50 @@ defmodule Realtime.SynHandlerTest do test "it handles :syn_conflict_resolution reason" do reason = :syn_conflict_resolution + pid = self() log = capture_log(fn -> - assert SynHandler.on_process_unregistered(@mod, @name, self(), %{}, reason) == :ok + assert SynHandler.on_process_unregistered(@mod, @name, pid, %{}, reason) == :ok end) topic = "#{@topic}:#{@name}" event = "#{@topic}_down" assert log =~ "#{@mod} terminated due to syn conflict resolution: #{inspect(@name)} #{inspect(self())}" - assert_receive %Phoenix.Socket.Broadcast{topic: ^topic, event: ^event, payload: nil} + assert_receive %Phoenix.Socket.Broadcast{topic: ^topic, event: ^event, payload: %{reason: ^reason, pid: ^pid}} end test "it handles other reasons" do reason = :other_reason + pid = self() log = capture_log(fn -> - assert SynHandler.on_process_unregistered(@mod, @name, self(), %{}, reason) == :ok + assert SynHandler.on_process_unregistered(@mod, @name, pid, %{}, reason) == :ok end) topic = "#{@topic}:#{@name}" event = "#{@topic}_down" refute log =~ "#{@mod} terminated: #{inspect(@name)} #{node()}" - assert_receive %Phoenix.Socket.Broadcast{topic: ^topic, event: ^event, payload: nil}, 500 + + assert_receive %Phoenix.Socket.Broadcast{ + topic: ^topic, + event: ^event, + payload: %{reason: ^reason, pid: ^pid} + }, + 500 + end + end + + defp assert_process_down(pid, reason, timeout) do + ref = Process.monitor(pid) + + if reason do + assert_receive {:DOWN, ^ref, :process, ^pid, ^reason}, timeout + else + assert_receive {:DOWN, ^ref, :process, ^pid, _reason}, timeout end end end diff --git a/test/realtime/tenants/authorization_remote_test.exs b/test/realtime/tenants/authorization_remote_test.exs index 
53efe44ec..e21148fd1 100644 --- a/test/realtime/tenants/authorization_remote_test.exs +++ b/test/realtime/tenants/authorization_remote_test.exs @@ -100,8 +100,9 @@ defmodule Realtime.Tenants.AuthorizationRemoteTest do Authorization.get_read_authorizations(%Policies{}, pid, context.authorization_context) end - # Waiting for RateCounter to limit - Process.sleep(1100) + # Force RateCounter to tick + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) for _ <- 1..10 do {:error, :increase_connection_pool} = @@ -127,8 +128,9 @@ defmodule Realtime.Tenants.AuthorizationRemoteTest do Authorization.get_write_authorizations(%Policies{}, pid, context.authorization_context) end - # Waiting for RateCounter to limit - Process.sleep(1100) + # Force RateCounter to tick + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) for _ <- 1..10 do {:error, :increase_connection_pool} = @@ -184,8 +186,9 @@ defmodule Realtime.Tenants.AuthorizationRemoteTest do end) Task.await_many([t1, t2], 20_000) - # Wait for RateCounter to log - Process.sleep(1000) + # Force RateCounter to tick and log error + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) end) external_id = context.tenant.external_id @@ -241,7 +244,7 @@ defmodule Realtime.Tenants.AuthorizationRemoteTest do Connect.shutdown("dev_tenant") # Waiting for :syn to unregister Process.sleep(100) - Realtime.RateCounter.stop("dev_tenant") + RateCounterHelper.stop("dev_tenant") {:ok, local_db_conn} = Database.connect(tenant, "realtime_test", :stop) topic = random_string() diff --git a/test/realtime/tenants/authorization_test.exs b/test/realtime/tenants/authorization_test.exs index 724e6e933..10c9c0e09 100644 --- a/test/realtime/tenants/authorization_test.exs +++ b/test/realtime/tenants/authorization_test.exs @@ -8,7 +8,7 @@ 
defmodule Realtime.Tenants.AuthorizationTest do alias Realtime.Api.Message alias Realtime.Database - alias Realtime.Repo + alias Realtime.Tenants.Repo alias Realtime.Tenants.Authorization alias Realtime.Tenants.Authorization.Policies alias Realtime.Tenants.Authorization.Policies.BroadcastPolicies @@ -105,8 +105,9 @@ defmodule Realtime.Tenants.AuthorizationTest do Authorization.get_read_authorizations(%Policies{}, pid, context.authorization_context) end - # Waiting for RateCounter to limit - Process.sleep(1100) + # Force RateCounter to tick + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) # The next auth requests will not call the database due to being rate limited reject(&Database.transaction/4) @@ -118,9 +119,8 @@ defmodule Realtime.Tenants.AuthorizationTest do assert log =~ "IncreaseConnectionPool: Too many database timeouts" - # Only one log message should be emitted - # Splitting by the error message returns the error message and the rest of the log only - assert length(String.split(log, "IncreaseConnectionPool: Too many database timeouts")) == 2 + # Only one or two log messages should be emitted + assert length(String.split(log, "IncreaseConnectionPool: Too many database timeouts")) <= 3 end @tag role: "anon", policies: [] @@ -135,8 +135,9 @@ defmodule Realtime.Tenants.AuthorizationTest do Authorization.get_write_authorizations(%Policies{}, pid, context.authorization_context) end - # Waiting for RateCounter to limit - Process.sleep(1100) + # Force RateCounter to tick + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) # The next auth requests will not call the database due to being rate limited reject(&Database.transaction/4) @@ -192,8 +193,9 @@ defmodule Realtime.Tenants.AuthorizationTest do end) Task.await_many([t1, t2], 20_000) - # Wait for RateCounter log - Process.sleep(1000) + # Force RateCounter to tick 
and log error + rate_counter = Realtime.Tenants.authorization_errors_per_second_rate(context.tenant) + RateCounterHelper.tick!(rate_counter) end) external_id = context.tenant.external_id @@ -280,7 +282,7 @@ defmodule Realtime.Tenants.AuthorizationTest do def rls_context(context) do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) topic = context[:topic] || random_string() @@ -318,9 +320,9 @@ defmodule Realtime.Tenants.AuthorizationTest do extensions = [Map.from_struct(%{extension | :settings => settings})] - {:ok, tenant} = Realtime.Api.update_tenant(tenant, %{extensions: extensions}) + {:ok, tenant} = Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{extensions: extensions}) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) end end diff --git a/test/realtime/tenants/batch_broadcast_test.exs b/test/realtime/tenants/batch_broadcast_test.exs new file mode 100644 index 000000000..f5fa42764 --- /dev/null +++ b/test/realtime/tenants/batch_broadcast_test.exs @@ -0,0 +1,529 @@ +defmodule Realtime.Tenants.BatchBroadcastTest do + use RealtimeWeb.ConnCase, async: true + use Mimic + + alias Realtime.Database + alias Realtime.GenCounter + alias Realtime.RateCounter + alias Realtime.Tenants + alias Realtime.Tenants.BatchBroadcast + alias Realtime.Tenants.Authorization + alias Realtime.Tenants.Authorization.Policies + alias Realtime.Tenants.Authorization.Policies.BroadcastPolicies + alias Realtime.Tenants.Connect + + alias RealtimeWeb.TenantBroadcaster + + setup do + tenant = 
Containers.checkout_tenant(run_migrations: true) + Realtime.Tenants.Cache.update_cache(tenant) + {:ok, tenant: tenant} + end + + describe "public message broadcasting" do + test "broadcasts multiple public messages successfully", %{tenant: tenant} do + broadcast_events_key = Tenants.events_per_second_key(tenant) + topic1 = random_string() + topic2 = random_string() + + messages = %{ + messages: [ + %{topic: topic1, payload: %{"data" => "test1"}, event: "event1"}, + %{topic: topic2, payload: %{"data" => "test2"}, event: "event2"}, + %{topic: topic1, payload: %{"data" => "test3"}, event: "event3"} + ] + } + + expect(GenCounter, :add, 3, fn ^broadcast_events_key -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 3, fn _, _, _, _, _ -> :ok end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, false) + end + + test "public messages do not have private prefix in topic", %{tenant: tenant} do + broadcast_events_key = Tenants.events_per_second_key(tenant) + topic = random_string() + + messages = %{ + messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1"}] + } + + expect(GenCounter, :add, fn ^broadcast_events_key -> :ok end) + + expect(TenantBroadcaster, :pubsub_broadcast, fn _, topic, _, _, _ -> + refute String.contains?(topic, "-private") + end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, false) + end + end + + describe "message ID metadata" do + test "includes message ID in metadata when provided", %{tenant: tenant} do + broadcast_events_key = Tenants.events_per_second_key(tenant) + topic = random_string() + + messages = %{ + messages: [%{id: "msg-123", topic: topic, payload: %{"data" => "test"}, event: "event1"}] + } + + expect(GenCounter, :add, fn ^broadcast_events_key -> :ok end) + + expect(TenantBroadcaster, :pubsub_broadcast, fn _, _, broadcast, _, _ -> + assert %Phoenix.Socket.Broadcast{ + payload: %{ + "payload" => %{"data" => "test"}, + "event" => "event1", + "type" => "broadcast", + "meta" => %{"id" 
=> "msg-123"} + } + } = broadcast + end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, false) + end + end + + describe "super user broadcasting" do + test "bypasses authorization for private messages with super_user flag", %{tenant: tenant} do + broadcast_events_key = Tenants.events_per_second_key(tenant) + topic1 = random_string() + topic2 = random_string() + + messages = %{ + messages: [ + %{topic: topic1, payload: %{"data" => "test1"}, event: "event1", private: true}, + %{topic: topic2, payload: %{"data" => "test2"}, event: "event2", private: true} + ] + } + + expect(GenCounter, :add, 2, fn ^broadcast_events_key -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 2, fn _, _, _, _, _ -> :ok end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, true) + end + + test "private messages have private prefix in topic", %{tenant: tenant} do + broadcast_events_key = Tenants.events_per_second_key(tenant) + topic = random_string() + + messages = %{ + messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1", private: true}] + } + + expect(GenCounter, :add, fn ^broadcast_events_key -> :ok end) + + expect(TenantBroadcaster, :pubsub_broadcast, fn _, topic, _, _, _ -> + assert String.contains?(topic, "-private") + end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, true) + end + end + + describe "private message authorization" do + test "broadcasts private messages with valid authorization", %{tenant: tenant} do + topic = random_string() + sub = random_string() + role = "authenticated" + + auth_params = %{ + tenant_id: tenant.external_id, + topic: topic, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + messages = %{messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1", private: true}]} + + broadcast_events_key = Tenants.events_per_second_key(tenant) + + expect(GenCounter, :add, 1, fn 
^broadcast_events_key -> :ok end) + + Authorization + |> expect(:build_authorization_params, fn params -> params end) + |> expect(:get_write_authorizations, fn _, _ -> {:ok, %Policies{broadcast: %BroadcastPolicies{write: true}}} end) + + expect(TenantBroadcaster, :pubsub_broadcast, 1, fn _, _, _, _, _ -> :ok end) + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + end + + test "skips private messages without authorization", %{tenant: tenant} do + topic = random_string() + sub = random_string() + role = "anon" + + auth_params = %{ + tenant_id: tenant.external_id, + topic: topic, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + Authorization + |> expect(:build_authorization_params, 1, fn params -> params end) + |> expect(:get_write_authorizations, 1, fn _, _ -> + {:ok, %Policies{broadcast: %BroadcastPolicies{write: false}}} + end) + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + messages = %{ + messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1", private: true}] + } + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + + assert calls(&TenantBroadcaster.pubsub_broadcast/5) == [] + end + + test "broadcasts only authorized topics in mixed authorization batch", %{tenant: tenant} do + topic = random_string() + sub = random_string() + role = "authenticated" + + auth_params = %{ + tenant_id: tenant.external_id, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + messages = %{ + messages: [ + %{topic: topic, payload: %{"data" => "test1"}, event: "event1", private: true}, + %{topic: random_string(), payload: %{"data" => "test2"}, event: "event2", private: true} + ] + } + + broadcast_events_key = Tenants.events_per_second_key(tenant) + + expect(GenCounter, :add, fn ^broadcast_events_key -> :ok 
end) + + Authorization + |> expect(:build_authorization_params, 2, fn params -> params end) + |> expect(:get_write_authorizations, 2, fn + _, %{topic: ^topic} -> %Policies{broadcast: %BroadcastPolicies{write: true}} + _, _ -> %Policies{broadcast: %BroadcastPolicies{write: false}} + end) + + # Only one topic will actually be broadcasted + expect(TenantBroadcaster, :pubsub_broadcast, 1, fn _, _, %Phoenix.Socket.Broadcast{topic: ^topic}, _, _ -> + :ok + end) + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + end + + test "groups messages by topic and checks authorization once per topic", %{tenant: tenant} do + topic_1 = random_string() + topic_2 = random_string() + sub = random_string() + role = "authenticated" + + auth_params = %{ + tenant_id: tenant.external_id, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + messages = %{ + messages: [ + %{topic: topic_1, payload: %{"data" => "test1"}, event: "event1", private: true}, + %{topic: topic_2, payload: %{"data" => "test2"}, event: "event2", private: true}, + %{topic: topic_1, payload: %{"data" => "test3"}, event: "event3", private: true} + ] + } + + broadcast_events_key = Tenants.events_per_second_key(tenant) + + expect(GenCounter, :add, 3, fn ^broadcast_events_key -> :ok end) + + Authorization + |> expect(:build_authorization_params, 2, fn params -> params end) + |> expect(:get_write_authorizations, 2, fn _, _ -> + {:ok, %Policies{broadcast: %BroadcastPolicies{write: true}}} + end) + + expect(TenantBroadcaster, :pubsub_broadcast, 3, fn _, _, _, _, _ -> :ok end) + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + end + + test "handles missing auth params for private messages", %{tenant: tenant} do + events_per_second_rate = Tenants.events_per_second_rate(tenant) + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn 
^events_per_second_rate -> {:ok, %RateCounter{avg: 0}} end) + + reject(&TenantBroadcaster.pubsub_broadcast/5) + reject(&Connect.lookup_or_start_connection/1) + + messages = %{ + messages: [%{topic: "topic1", payload: %{"data" => "test"}, event: "event1", private: true}] + } + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, false) + + assert calls(&TenantBroadcaster.pubsub_broadcast/5) == [] + end + end + + describe "mixed public and private messages" do + setup %{tenant: tenant} do + {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) + %{db_conn: db_conn} + end + + test "broadcasts both public and private messages together", %{tenant: tenant, db_conn: db_conn} do + topic = random_string() + sub = random_string() + role = "authenticated" + + create_rls_policies(db_conn, [:authenticated_write_broadcast], %{topic: topic}) + + auth_params = %{ + tenant_id: tenant.external_id, + topic: topic, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + events_per_second_rate = Tenants.events_per_second_rate(tenant) + broadcast_events_key = Tenants.events_per_second_key(tenant) + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn + ^events_per_second_rate -> + {:ok, %RateCounter{avg: 0}} + + _ -> + {:ok, + %RateCounter{ + avg: 0, + limit: %{log: true, value: 10, measurement: :sum, triggered: false, log_fn: fn -> :ok end} + }} + end) + + expect(GenCounter, :add, 3, fn ^broadcast_events_key -> :ok end) + expect(Connect, :lookup_or_start_connection, fn _ -> {:ok, db_conn} end) + + Authorization + |> expect(:build_authorization_params, fn params -> params end) + |> expect(:get_write_authorizations, fn _, _ -> + {:ok, %Policies{broadcast: %BroadcastPolicies{write: true}}} + end) + + expect(TenantBroadcaster, :pubsub_broadcast, 3, fn _, _, _, _, _ -> :ok end) + + messages = %{ + messages: [ + %{topic: "public1", payload: %{"data" => 
"public"}, event: "event1", private: false}, + %{topic: topic, payload: %{"data" => "private"}, event: "event2", private: true}, + %{topic: "public2", payload: %{"data" => "public2"}, event: "event3"} + ] + } + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + + broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/5) + assert length(broadcast_calls) == 3 + end + end + + describe "Plug.Conn integration" do + test "accepts and converts Plug.Conn to auth params", %{tenant: tenant} do + topic = random_string() + broadcast_events_key = Tenants.events_per_second_key(tenant) + messages = %{messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1"}]} + + expect(GenCounter, :add, fn ^broadcast_events_key -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 1, fn _, _, _, _, _ -> :ok end) + + conn = + build_conn() + |> Map.put(:assigns, %{ + claims: %{"sub" => "user123", "role" => "authenticated"}, + role: "authenticated", + sub: "user123" + }) + |> Map.put(:req_headers, [{"authorization", "Bearer token"}]) + + assert :ok = BatchBroadcast.broadcast(conn, tenant, messages, false) + end + end + + describe "message validation" do + test "returns changeset error when topic is missing", %{tenant: tenant} do + messages = %{messages: [%{payload: %{"data" => "test"}, event: "event1"}]} + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + assert {:error, %Ecto.Changeset{valid?: false}} = result + end + + test "returns changeset error when payload is missing", %{tenant: tenant} do + topic = random_string() + messages = %{messages: [%{topic: topic, event: "event1"}]} + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + assert {:error, %Ecto.Changeset{valid?: false}} = result + end + + test "returns changeset error when event is missing", %{tenant: tenant} do + topic = random_string() + messages = 
%{messages: [%{topic: topic, payload: %{"data" => "test"}}]} + + reject(&TenantBroadcaster.pubsub_broadcast/5) + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + assert {:error, %Ecto.Changeset{valid?: false}} = result + end + + test "returns changeset error when messages array is empty", %{tenant: tenant} do + messages = %{messages: []} + reject(&TenantBroadcaster.pubsub_broadcast/5) + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + assert {:error, %Ecto.Changeset{valid?: false}} = result + end + end + + describe "rate limiting" do + test "rejects broadcast when rate limit is exceeded", %{tenant: tenant} do + events_per_second_rate = Tenants.events_per_second_rate(tenant) + topic = random_string() + messages = %{messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1"}]} + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn ^events_per_second_rate -> {:ok, %RateCounter{avg: tenant.max_events_per_second + 1}} end) + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + assert {:error, :too_many_requests, "You have exceeded your rate limit"} = result + end + + test "rejects broadcast when batch would exceed rate limit", %{tenant: tenant} do + events_per_second_rate = Tenants.events_per_second_rate(tenant) + + messages = %{ + messages: + Enum.map(1..10, fn _ -> + %{topic: random_string(), payload: %{"data" => "test"}, event: random_string()} + end) + } + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn ^events_per_second_rate -> + {:ok, %RateCounter{avg: tenant.max_events_per_second - 5}} + end) + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + + assert {:error, :too_many_requests, "Too many messages to broadcast, please reduce the batch size"} = result + end + + test "allows broadcast at rate limit boundary", %{tenant: tenant} do + 
events_per_second_rate = Tenants.events_per_second_rate(tenant) + broadcast_events_key = Tenants.events_per_second_key(tenant) + current_rate = tenant.max_events_per_second - 2 + + messages = %{ + messages: [ + %{topic: random_string(), payload: %{"data" => "test1"}, event: "event1"}, + %{topic: random_string(), payload: %{"data" => "test2"}, event: "event2"} + ] + } + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn ^events_per_second_rate -> + {:ok, %RateCounter{avg: current_rate}} + end) + + expect(GenCounter, :add, 2, fn ^broadcast_events_key -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 2, fn _, _, _, _, _ -> :ok end) + + assert :ok = BatchBroadcast.broadcast(nil, tenant, messages, false) + end + + test "rejects broadcast when payload size exceeds tenant limit", %{tenant: tenant} do + messages = %{ + messages: [ + %{ + topic: random_string(), + payload: %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)}, + event: "event1" + } + ] + } + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + result = BatchBroadcast.broadcast(nil, tenant, messages, false) + + assert {:error, + %Ecto.Changeset{ + valid?: false, + changes: %{messages: [%{errors: [payload: {"Payload size exceeds tenant limit", []}]}]} + }} = result + end + end + + describe "error handling" do + test "returns error when tenant is nil" do + messages = %{messages: [%{topic: "topic1", payload: %{"data" => "test"}, event: "event1"}]} + assert {:error, :tenant_not_found} = BatchBroadcast.broadcast(nil, nil, messages, false) + end + + test "gracefully handles database connection errors for private messages", %{tenant: tenant} do + topic = random_string() + sub = random_string() + role = "authenticated" + + auth_params = %{ + tenant_id: tenant.external_id, + headers: [{"header-1", "value-1"}], + claims: %{"sub" => sub, "role" => role, "exp" => Joken.current_time() + 1_000}, + role: role, + sub: sub + } + + events_per_second_rate = 
Tenants.events_per_second_rate(tenant) + + RateCounter + |> stub(:new, fn _ -> {:ok, nil} end) + |> stub(:get, fn ^events_per_second_rate -> {:ok, %RateCounter{avg: 0}} end) + + expect(Connect, :lookup_or_start_connection, fn _ -> {:error, :connection_failed} end) + + reject(&TenantBroadcaster.pubsub_broadcast/5) + + messages = %{ + messages: [%{topic: topic, payload: %{"data" => "test"}, event: "event1", private: true}] + } + + assert :ok = BatchBroadcast.broadcast(auth_params, tenant, messages, false) + + assert calls(&TenantBroadcaster.pubsub_broadcast/5) == [] + end + end +end diff --git a/test/realtime/tenants/cache_test.exs b/test/realtime/tenants/cache_test.exs index 1889c94ef..46577b802 100644 --- a/test/realtime/tenants/cache_test.exs +++ b/test/realtime/tenants/cache_test.exs @@ -1,11 +1,11 @@ defmodule Realtime.Tenants.CacheTest do - alias Realtime.Rpc # async: false due to the usage of dev_realtime tenant use Realtime.DataCase, async: false alias Realtime.Api - alias Realtime.Tenants.Cache + alias Realtime.Rpc alias Realtime.Tenants + alias Realtime.Tenants.Cache setup do {:ok, tenant: tenant_fixture()} @@ -15,10 +15,18 @@ defmodule Realtime.Tenants.CacheTest do test "tenants cache returns a cached result", %{tenant: tenant} do external_id = tenant.external_id assert %Api.Tenant{name: "tenant"} = Cache.get_tenant_by_external_id(external_id) - Api.update_tenant(tenant, %{name: "new name"}) + + changeset = Api.Tenant.changeset(tenant, %{name: "new name"}) + Repo.update!(changeset) assert %Api.Tenant{name: "new name"} = Tenants.get_tenant_by_external_id(external_id) assert %Api.Tenant{name: "tenant"} = Cache.get_tenant_by_external_id(external_id) end + + test "does not cache when tenant is not found" do + assert Cache.get_tenant_by_external_id("not found") == nil + + assert Cachex.exists?(Cache, {:get_tenant_by_external_id, "not found"}) == {:ok, false} + end end describe "invalidate_tenant_cache/1" do @@ -38,6 +46,18 @@ defmodule 
Realtime.Tenants.CacheTest do end end + describe "update_cache/1" do + test "updates the cache given a tenant", %{tenant: tenant} do + external_id = tenant.external_id + assert %Api.Tenant{name: "tenant"} = Cache.get_tenant_by_external_id(external_id) + # Update a tenant + updated_tenant = %{tenant | name: "updated name"} + # Update cache + Cache.update_cache(updated_tenant) + assert %Api.Tenant{name: "updated name"} = Cache.get_tenant_by_external_id(external_id) + end + end + describe "distributed_invalidate_tenant_cache/1" do setup do {:ok, node} = Clustered.start() @@ -51,25 +71,21 @@ defmodule Realtime.Tenants.CacheTest do dummy_name = random_string() # Ensure cache has the values - Cachex.put!( - Realtime.Tenants.Cache, - {{:get_tenant_by_external_id, 1}, [external_id]}, - {:cached, %{tenant | name: dummy_name}} - ) - - Rpc.enhanced_call(node, Cachex, :put!, [ - Realtime.Tenants.Cache, - {{:get_tenant_by_external_id, 1}, [external_id]}, - {:cached, %{tenant | name: dummy_name}} - ]) + Realtime.Tenants.Cache.update_cache(%{tenant | name: dummy_name}) + + Rpc.enhanced_call(node, Realtime.Tenants.Cache, :update_cache, [%{tenant | name: dummy_name}]) # Cache showing old value - assert %Api.Tenant{name: ^dummy_name} = Cache.get_tenant_by_external_id(external_id) - assert %Api.Tenant{name: ^dummy_name} = Rpc.enhanced_call(node, Cache, :get_tenant_by_external_id, [external_id]) + assert {:ok, %Api.Tenant{name: ^dummy_name}} = Cachex.get(Cache, {:get_tenant_by_external_id, external_id}) + + assert {:ok, %Api.Tenant{name: ^dummy_name}} = + Rpc.enhanced_call(node, Cachex, :get, [Cache, {:get_tenant_by_external_id, external_id}]) # Invalidate cache - assert true = Cache.distributed_invalidate_tenant_cache(external_id) + assert :ok = Cache.distributed_invalidate_tenant_cache(external_id) + # wait for cache to be invalidated in both nodes + Process.sleep(200) # Cache showing new value assert %Api.Tenant{name: ^expected_name} = Cache.get_tenant_by_external_id(external_id) 
@@ -77,4 +93,39 @@ defmodule Realtime.Tenants.CacheTest do Rpc.enhanced_call(node, Cache, :get_tenant_by_external_id, [external_id]) end end + + describe "global_cache_update/1" do + setup do + {:ok, node} = Clustered.start() + %{node: node} + end + + test "update the cache given a tenant_id", %{node: node} do + external_id = "dev_tenant" + %Api.Tenant{name: expected_name} = tenant = Tenants.get_tenant_by_external_id(external_id) + + dummy_name = random_string() + + # Ensure cache has the values + Realtime.Tenants.Cache.update_cache(%{tenant | name: dummy_name}) + + Rpc.enhanced_call(node, Cache, :update_cache, [%{tenant | name: dummy_name}]) + + # Cache showing old value + assert %Api.Tenant{name: ^dummy_name} = Cache.get_tenant_by_external_id(external_id) + assert %Api.Tenant{name: ^dummy_name} = Rpc.enhanced_call(node, Cache, :get_tenant_by_external_id, [external_id]) + + # Update cache + assert :ok = Cache.global_cache_update(tenant) + + # wait for cache to be updated in both nodes + Process.sleep(200) + + # Cache showing new value + assert {:ok, %Api.Tenant{name: ^expected_name}} = Cachex.get(Cache, {:get_tenant_by_external_id, external_id}) + + assert {:ok, %Api.Tenant{name: ^expected_name}} = + Rpc.enhanced_call(node, Cachex, :get, [Cache, {:get_tenant_by_external_id, external_id}]) + end + end end diff --git a/test/realtime/tenants/connect/register_process_test.exs b/test/realtime/tenants/connect/register_process_test.exs index d4227996f..02cc33391 100644 --- a/test/realtime/tenants/connect/register_process_test.exs +++ b/test/realtime/tenants/connect/register_process_test.exs @@ -7,7 +7,7 @@ defmodule Realtime.Tenants.Connect.RegisterProcessTest do setup do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) {:ok, conn} = 
Database.connect(tenant, "realtime_test") %{tenant_id: tenant.external_id, db_conn_pid: conn} end diff --git a/test/realtime/tenants/connect_test.exs b/test/realtime/tenants/connect_test.exs index 290fb1c8d..0b594de5d 100644 --- a/test/realtime/tenants/connect_test.exs +++ b/test/realtime/tenants/connect_test.exs @@ -50,7 +50,50 @@ defmodule Realtime.Tenants.ConnectTest do end end + describe "list_tenants/0" do + test "lists all tenants with active connections", %{tenant: tenant1} do + tenant2 = Containers.checkout_tenant(run_migrations: true) + assert {:ok, _} = Connect.lookup_or_start_connection(tenant1.external_id) + assert {:ok, _} = Connect.lookup_or_start_connection(tenant2.external_id) + + list_tenants = Connect.list_tenants() |> MapSet.new() + tenants = MapSet.new([tenant1.external_id, tenant2.external_id]) + + assert MapSet.subset?(tenants, list_tenants) + end + end + describe "handle cold start" do + test "multiple processes connecting calling Connect.connect", %{tenant: tenant} do + parent = self() + + # Let's slow down Connect.connect so that multiple RPC calls are executed + stub(Connect, :connect, fn x, y, z -> + :timer.sleep(1000) + call_original(Connect, :connect, [x, y, z]) + end) + + connect = fn -> send(parent, Connect.lookup_or_start_connection(tenant.external_id)) end + # Let's call enough times to potentially trigger the Connect RateCounter + + for _ <- 1..50, do: spawn(connect) + + assert_receive({:ok, pid}, 1100) + + for _ <- 1..49, do: assert_receive({:ok, ^pid}) + + # Does not trigger rate limit as connections eventually succeeded + + {:ok, rate_counter} = + tenant.external_id + |> Tenants.connect_errors_per_second_rate() + |> Realtime.RateCounter.get() + + assert rate_counter.sum == 0 + assert rate_counter.avg == 0.0 + assert rate_counter.limit.triggered == false + end + test "multiple proccesses succeed together", %{tenant: tenant} do parent = self() @@ -78,12 +121,55 @@ defmodule Realtime.Tenants.ConnectTest do assert_receive {:ok, 
^pid} end - test "more than 5 seconds passed error out", %{tenant: tenant} do + test "more than 15 seconds passed error out", %{tenant: tenant} do parent = self() # Let's slow down Connect starting expect(Database, :check_tenant_connection, fn t -> - :timer.sleep(5500) + Process.sleep(15500) + call_original(Database, :check_tenant_connection, [t]) + end) + + connect = fn -> send(parent, Connect.lookup_or_start_connection(tenant.external_id)) end + + spawn(connect) + spawn(connect) + + {:error, :initializing} = Connect.lookup_or_start_connection(tenant.external_id) + # The above call waited 15 seconds + assert_receive {:error, :initializing} + assert_receive {:error, :initializing} + + # This one will succeed + {:ok, _pid} = Connect.lookup_or_start_connection(tenant.external_id) + end + + test "too many db connections", %{tenant: tenant} do + extension = %{ + "type" => "postgres_cdc_rls", + "settings" => %{ + "db_host" => "127.0.0.1", + "db_name" => "postgres", + "db_user" => "supabase_admin", + "db_password" => "postgres", + "poll_interval" => 100, + "poll_max_changes" => 100, + "poll_max_record_bytes" => 1_048_576, + "region" => "us-east-1", + "ssl_enforced" => false, + "db_pool" => 100, + "subcriber_pool_size" => 100, + "subs_pool_size" => 100 + } + } + + {:ok, tenant} = update_extension(tenant, extension) + + parent = self() + + # Let's slow down Connect starting + expect(Database, :check_tenant_connection, fn t -> + :timer.sleep(1000) call_original(Database, :check_tenant_connection, [t]) end) @@ -97,12 +183,13 @@ defmodule Realtime.Tenants.ConnectTest do spawn(connect) spawn(connect) - {:error, :tenant_database_unavailable} = Connect.lookup_or_start_connection(tenant.external_id) + # This one should block and wait for the first Connect + {:error, :tenant_db_too_many_connections} = Connect.lookup_or_start_connection(tenant.external_id) - # Only one will succeed the others timed out waiting - assert_receive {:error, :tenant_database_unavailable} - assert_receive 
{:error, :tenant_database_unavailable} - assert_receive {:ok, _pid}, 7000 + assert_receive {:error, :tenant_db_too_many_connections} + assert_receive {:error, :tenant_db_too_many_connections} + assert_receive {:error, :tenant_db_too_many_connections} + refute_receive _any end end @@ -254,9 +341,9 @@ defmodule Realtime.Tenants.ConnectTest do region = Tenants.region(tenant) assert {_pid, %{conn: ^db_conn, region: ^region}} = :syn.lookup(Connect, external_id) Process.sleep(1000) - :syn.leave(:users, external_id, self()) + external_id |> UsersCounter.scope() |> :syn.leave(external_id, self()) Process.sleep(1000) - assert :undefined = :syn.lookup(Connect, external_id) + assert :undefined = external_id |> UsersCounter.scope() |> :syn.lookup(external_id) refute Process.alive?(db_conn) Connect.shutdown(external_id) end @@ -267,6 +354,34 @@ defmodule Realtime.Tenants.ConnectTest do assert {:error, :tenant_suspended} = Connect.lookup_or_start_connection(tenant.external_id) end + test "tenant not able to connect if database has not enough connections", %{ + tenant: tenant + } do + extension = %{ + "type" => "postgres_cdc_rls", + "settings" => %{ + "db_host" => "127.0.0.1", + "db_name" => "postgres", + "db_user" => "supabase_admin", + "db_password" => "postgres", + "poll_interval" => 100, + "poll_max_changes" => 100, + "poll_max_record_bytes" => 1_048_576, + "region" => "us-east-1", + "ssl_enforced" => false, + "db_pool" => 100, + "subcriber_pool_size" => 100, + "subs_pool_size" => 100 + } + } + + {:ok, tenant} = update_extension(tenant, extension) + + assert capture_log(fn -> + assert {:error, :tenant_db_too_many_connections} = Connect.lookup_or_start_connection(tenant.external_id) + end) =~ ~r/Only \d+ available connections\. 
At least \d+ connections are required/ + end + test "handles tenant suspension and unsuspension in a reactive way", %{tenant: tenant} do assert {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) assert Connect.ready?(tenant.external_id) @@ -352,11 +467,13 @@ defmodule Realtime.Tenants.ConnectTest do assert replication_connection_before == replication_connection_after end - test "on replication connection postgres pid being stopped, also kills the Connect module", %{tenant: tenant} do + test "on replication connection postgres pid being stopped, Connect module recovers it", %{tenant: tenant} do assert {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) assert Connect.ready?(tenant.external_id) replication_connection_pid = ReplicationConnection.whereis(tenant.external_id) + Process.monitor(replication_connection_pid) + assert Process.alive?(replication_connection_pid) pid = Connect.whereis(tenant.external_id) @@ -366,21 +483,33 @@ defmodule Realtime.Tenants.ConnectTest do [] ) - assert_process_down(replication_connection_pid) - assert_process_down(pid) + assert_receive {:DOWN, _, :process, ^replication_connection_pid, _} + + Process.sleep(1500) + new_replication_connection_pid = ReplicationConnection.whereis(tenant.external_id) + + assert replication_connection_pid != new_replication_connection_pid + assert Process.alive?(new_replication_connection_pid) + assert Process.alive?(pid) end - test "on replication connection exit, also kills the Connect module", %{tenant: tenant} do + test "on replication connection exit, Connect module recovers it", %{tenant: tenant} do assert {:ok, _db_conn} = Connect.lookup_or_start_connection(tenant.external_id) assert Connect.ready?(tenant.external_id) replication_connection_pid = ReplicationConnection.whereis(tenant.external_id) + Process.monitor(replication_connection_pid) assert Process.alive?(replication_connection_pid) pid = Connect.whereis(tenant.external_id) 
Process.exit(replication_connection_pid, :kill) + assert_receive {:DOWN, _, :process, ^replication_connection_pid, _} - assert_process_down(replication_connection_pid) - assert_process_down(pid) + Process.sleep(1500) + new_replication_connection_pid = ReplicationConnection.whereis(tenant.external_id) + + assert replication_connection_pid != new_replication_connection_pid + assert Process.alive?(new_replication_connection_pid) + assert Process.alive?(pid) end test "handles max_wal_senders by logging the correct operational code", %{tenant: tenant} do @@ -429,6 +558,53 @@ defmodule Realtime.Tenants.ConnectTest do assert capture_log(fn -> assert {:error, :rpc_error, _} = Connect.lookup_or_start_connection("tenant") end) =~ "project=tenant external_id=tenant [error] ErrorOnRpcCall" end + + test "rate limit connect when too many connections against bad database", %{tenant: tenant} do + extension = %{ + "type" => "postgres_cdc_rls", + "settings" => %{ + "db_host" => "127.0.0.1", + "db_name" => "postgres", + "db_user" => "supabase_admin", + "db_password" => "postgres", + "poll_interval" => 100, + "poll_max_changes" => 100, + "poll_max_record_bytes" => 1_048_576, + "region" => "us-east-1", + "ssl_enforced" => true + } + } + + {:ok, tenant} = update_extension(tenant, extension) + + log = + capture_log(fn -> + res = + for _ <- 1..50 do + Process.sleep(200) + Connect.lookup_or_start_connection(tenant.external_id) + end + + assert Enum.any?(res, fn {_, res} -> res == :connect_rate_limit_reached end) + end) + + assert log =~ "DatabaseConnectionRateLimitReached: Too many connection attempts against the tenant database" + end + + test "rate limit connect will not trigger if connection is successful", %{tenant: tenant} do + log = + capture_log(fn -> + res = + for _ <- 1..20 do + Process.sleep(500) + Connect.lookup_or_start_connection(tenant.external_id) + end + + refute Enum.any?(res, fn {_, res} -> res == :tenant_db_too_many_connections end) + end) + + refute log =~ 
"DatabaseConnectionRateLimitReached: Too many connection attempts against the tenant database" + end end describe "shutdown/1" do @@ -449,30 +625,6 @@ defmodule Realtime.Tenants.ConnectTest do test "if tenant does not exist, does nothing" do assert :ok = Connect.shutdown("none") end - - test "tenant not able to connect if database has not enough connections", %{tenant: tenant} do - extension = %{ - "type" => "postgres_cdc_rls", - "settings" => %{ - "db_host" => "127.0.0.1", - "db_name" => "postgres", - "db_user" => "supabase_admin", - "db_password" => "postgres", - "poll_interval" => 100, - "poll_max_changes" => 100, - "poll_max_record_bytes" => 1_048_576, - "region" => "us-east-1", - "ssl_enforced" => false, - "db_pool" => 100, - "subcriber_pool_size" => 100, - "subs_pool_size" => 100 - } - } - - {:ok, tenant} = update_extension(tenant, extension) - - assert {:error, :tenant_db_too_many_connections} = Connect.lookup_or_start_connection(tenant.external_id) - end end describe "registers into local registry" do @@ -519,6 +671,6 @@ defmodule Realtime.Tenants.ConnectTest do put_in(extension, ["settings", "db_port"], db_port) ] - Realtime.Api.update_tenant(tenant, %{extensions: extensions}) + Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{extensions: extensions}) end end diff --git a/test/realtime/tenants/janitor/maintenance_task_test.exs b/test/realtime/tenants/janitor/maintenance_task_test.exs index f4c51436e..5d4aea474 100644 --- a/test/realtime/tenants/janitor/maintenance_task_test.exs +++ b/test/realtime/tenants/janitor/maintenance_task_test.exs @@ -4,20 +4,26 @@ defmodule Realtime.Tenants.Janitor.MaintenanceTaskTest do alias Realtime.Tenants.Janitor.MaintenanceTask alias Realtime.Api.Message alias Realtime.Database - alias Realtime.Repo + alias Realtime.Tenants.Repo setup do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, 
{{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) %{tenant: tenant} end test "cleans messages older than 72 hours and creates partitions", %{tenant: tenant} do + {:ok, conn} = Database.connect(tenant, "realtime_test", :stop) + utc_now = NaiveDateTime.utc_now() limit = NaiveDateTime.add(utc_now, -72, :hour) + date_start = Date.utc_today() |> Date.add(-10) + date_end = Date.utc_today() + create_messages_partitions(conn, date_start, date_end) + messages = for days <- -5..0 do inserted_at = NaiveDateTime.add(utc_now, days, :day) @@ -27,12 +33,11 @@ defmodule Realtime.Tenants.Janitor.MaintenanceTaskTest do to_keep = messages - |> Enum.reject(&(NaiveDateTime.compare(limit, &1.inserted_at) == :gt)) + |> Enum.reject(&(NaiveDateTime.compare(NaiveDateTime.beginning_of_day(limit), &1.inserted_at) == :gt)) |> MapSet.new() assert MaintenanceTask.run(tenant.external_id) == :ok - {:ok, conn} = Database.connect(tenant, "realtime_test", :stop) {:ok, res} = Repo.all(conn, from(m in Message), Message) verify_partitions(conn) @@ -63,7 +68,7 @@ defmodule Realtime.Tenants.Janitor.MaintenanceTaskTest do tenant = tenant_fixture(%{extensions: extensions}) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) Process.flag(:trap_exit, true) @@ -80,7 +85,7 @@ defmodule Realtime.Tenants.Janitor.MaintenanceTaskTest do defp verify_partitions(conn) do today = Date.utc_today() - yesterday = Date.add(today, -1) + yesterday = Date.add(today, -3) future = Date.add(today, 3) dates = Date.range(yesterday, future) diff --git a/test/realtime/tenants/janitor_test.exs b/test/realtime/tenants/janitor_test.exs index 4ac1a0eda..aa32b86f8 100644 --- a/test/realtime/tenants/janitor_test.exs +++ b/test/realtime/tenants/janitor_test.exs @@ -6,9 +6,9 @@ defmodule 
Realtime.Tenants.JanitorTest do alias Realtime.Api.Message alias Realtime.Database - alias Realtime.Repo alias Realtime.Tenants.Janitor alias Realtime.Tenants.Connect + alias Realtime.Tenants.Repo setup do :ets.delete_all_objects(Connect) @@ -24,13 +24,21 @@ defmodule Realtime.Tenants.JanitorTest do Enum.map( [tenant1, tenant2], fn tenant -> - tenant = Repo.preload(tenant, :extensions) + tenant = Realtime.Repo.preload(tenant, :extensions) Connect.lookup_or_start_connection(tenant.external_id) Process.sleep(500) tenant end ) + date_start = Date.utc_today() |> Date.add(-10) + date_end = Date.utc_today() + + Enum.map(tenants, fn tenant -> + {:ok, conn} = Database.connect(tenant, "realtime_test", :stop) + create_messages_partitions(conn, date_start, date_end) + end) + start_supervised!( {Task.Supervisor, name: Realtime.Tenants.Janitor.TaskSupervisor, max_children: 5, max_seconds: 500, max_restarts: 1} @@ -62,7 +70,7 @@ defmodule Realtime.Tenants.JanitorTest do to_keep = messages - |> Enum.reject(&(NaiveDateTime.compare(limit, &1.inserted_at) == :gt)) + |> Enum.reject(&(NaiveDateTime.compare(NaiveDateTime.beginning_of_day(limit), &1.inserted_at) == :gt)) |> MapSet.new() start_supervised!(Janitor) @@ -105,7 +113,7 @@ defmodule Realtime.Tenants.JanitorTest do to_keep = messages - |> Enum.reject(&(NaiveDateTime.compare(limit, &1.inserted_at) == :gt)) + |> Enum.reject(&(NaiveDateTime.compare(NaiveDateTime.beginning_of_day(limit), &1.inserted_at) == :gt)) |> MapSet.new() start_supervised!(Janitor) @@ -162,7 +170,7 @@ defmodule Realtime.Tenants.JanitorTest do defp verify_partitions(conn) do today = Date.utc_today() - yesterday = Date.add(today, -1) + yesterday = Date.add(today, -3) future = Date.add(today, 3) dates = Date.range(yesterday, future) diff --git a/test/realtime/tenants/rebalancer_test.exs b/test/realtime/tenants/rebalancer_test.exs index ac8e1ea36..d91e7e675 100644 --- a/test/realtime/tenants/rebalancer_test.exs +++ b/test/realtime/tenants/rebalancer_test.exs @@ 
-9,7 +9,7 @@ defmodule Realtime.Tenants.RebalancerTest do setup do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) %{tenant: tenant} end diff --git a/test/realtime/tenants/replication_connection_test.exs b/test/realtime/tenants/replication_connection_test.exs index 783270313..031f3cae6 100644 --- a/test/realtime/tenants/replication_connection_test.exs +++ b/test/realtime/tenants/replication_connection_test.exs @@ -11,6 +11,9 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do alias Realtime.Tenants alias Realtime.Tenants.ReplicationConnection alias RealtimeWeb.Endpoint + alias Realtime.Tenants.Repo + + @replication_slot_name "supabase_realtime_messages_replication_slot_test" setup do slot = Application.get_env(:realtime, :slot_name_suffix) @@ -20,11 +23,9 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do tenant = Containers.checkout_tenant(run_migrations: true) {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) - name = "supabase_realtime_messages_replication_slot_test" - Postgrex.query(db_conn, "SELECT pg_drop_replication_slot($1)", [name]) - Process.exit(db_conn, :normal) + Postgrex.query(db_conn, "SELECT pg_drop_replication_slot($1)", [@replication_slot_name]) - %{tenant: tenant} + %{tenant: tenant, db_conn: db_conn} end describe "temporary process" do @@ -70,7 +71,7 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do assert {:error, _} = ReplicationConnection.start(tenant, self()) end - test "starts a handler for the tenant and broadcasts", %{tenant: tenant} do + test "starts a handler for the tenant and broadcasts", %{tenant: tenant, db_conn: db_conn} do start_link_supervised!( {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, restart: 
:transient @@ -98,8 +99,8 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do payload = %{ "event" => "INSERT", + "meta" => %{"id" => row.id}, "payload" => %{ - "id" => row.id, "value" => value }, "type" => "broadcast" @@ -121,8 +122,89 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do }) end - {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) - {:ok, _} = Realtime.Repo.insert_all_entries(db_conn, messages, Message) + {:ok, _} = Repo.insert_all_entries(db_conn, messages, Message) + + messages_received = + for _ <- 1..total_messages, into: [] do + assert_receive {:socket_push, :text, data} + data |> IO.iodata_to_binary() |> Jason.decode!() + end + + for row <- messages do + assert Enum.count(messages_received, fn message_received -> + value = row |> Map.from_struct() |> get_in([:changes, :payload, "value"]) + + match?( + %{ + "event" => "broadcast", + "payload" => %{ + "event" => "INSERT", + "meta" => %{"id" => _id}, + "payload" => %{ + "value" => ^value + } + }, + "ref" => nil, + "topic" => ^topic + }, + message_received + ) + end) == 1 + end + end + + test "starts a handler for the tenant and broadcasts to public channel", %{tenant: tenant, db_conn: db_conn} do + start_link_supervised!( + {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, + restart: :transient + ) + + topic = random_string() + tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, true) + subscribe(tenant_topic, topic) + + total_messages = 5 + # Works with one insert per transaction + for _ <- 1..total_messages do + value = random_string() + + row = + message_fixture(tenant, %{ + "topic" => topic, + "private" => false, + "event" => "INSERT", + "payload" => %{"value" => value} + }) + + assert_receive {:socket_push, :text, data} + message = data |> IO.iodata_to_binary() |> Jason.decode!() + + payload = %{ + "event" => "INSERT", + "meta" => %{"id" => row.id}, + "payload" => %{ + "value" => value + }, + "type" 
=> "broadcast" + } + + assert message == %{"event" => "broadcast", "payload" => payload, "ref" => nil, "topic" => topic} + end + + Process.sleep(500) + # Works with batch inserts + messages = + for _ <- 1..total_messages do + Message.changeset(%Message{}, %{ + "topic" => topic, + "private" => false, + "event" => "INSERT", + "extension" => "broadcast", + "payload" => %{"value" => random_string()} + }) + end + + {:ok, _} = Repo.insert_all_entries(db_conn, messages, Message) messages_received = for _ <- 1..total_messages, into: [] do @@ -139,8 +221,8 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do "event" => "broadcast", "payload" => %{ "event" => "INSERT", + "meta" => %{"id" => _id}, "payload" => %{ - "id" => _, "value" => ^value } }, @@ -153,6 +235,113 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do end end + test "replicates binary with exactly 16 bytes to test UUID conversion error", %{tenant: tenant} do + start_link_supervised!( + {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, + restart: :transient + ) + + topic = "db:job_scheduler" + tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, false) + subscribe(tenant_topic, topic) + payload = %{"value" => random_string()} + + row = + message_fixture(tenant, %{ + "topic" => topic, + "private" => true, + "event" => "UPDATE", + "extension" => "broadcast", + "payload" => payload + }) + + row_id = row.id + + assert_receive {:socket_push, :text, data}, 2000 + message = data |> IO.iodata_to_binary() |> Jason.decode!() + + assert %{ + "event" => "broadcast", + "payload" => %{ + "event" => "UPDATE", + "meta" => %{"id" => ^row_id}, + "payload" => received_payload, + "type" => "broadcast" + }, + "ref" => nil, + "topic" => ^topic + } = message + + assert received_payload == payload + end + + test "should not process unsupported relations", %{tenant: tenant, db_conn: db_conn} do + # update + queries = [ + "DROP TABLE IF EXISTS public.test", + 
""" + CREATE TABLE "public"."test" ( + "id" int4 NOT NULL default nextval('test_id_seq'::regclass), + "details" text, + PRIMARY KEY ("id")); + """ + ] + + Postgrex.transaction(db_conn, fn conn -> + Enum.each(queries, &Postgrex.query!(conn, &1, [])) + end) + + logs = + capture_log(fn -> + start_link_supervised!( + {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, + restart: :transient + ) + + assert_replication_started(db_conn, @replication_slot_name) + assert_publication_contains_only_messages(db_conn, "supabase_realtime_messages_publication") + + # Add table to publication to test the error handling + Postgrex.query!(db_conn, "ALTER PUBLICATION supabase_realtime_messages_publication ADD TABLE public.test", []) + %{rows: [[_id]]} = Postgrex.query!(db_conn, "insert into test (details) values ('test') returning id", []) + + topic = "db:job_scheduler" + tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, false) + subscribe(tenant_topic, topic) + payload = %{"value" => random_string()} + + row = + message_fixture(tenant, %{ + "topic" => topic, + "private" => true, + "event" => "UPDATE", + "extension" => "broadcast", + "payload" => payload + }) + + row_id = row.id + + assert_receive {:socket_push, :text, data}, 2000 + message = data |> IO.iodata_to_binary() |> Jason.decode!() + + assert %{ + "event" => "broadcast", + "payload" => %{ + "event" => "UPDATE", + "meta" => %{"id" => ^row_id}, + "payload" => received_payload, + "type" => "broadcast" + }, + "ref" => nil, + "topic" => ^topic + } = message + + assert received_payload == payload + end) + + assert logs =~ "Unexpected relation on schema 'public' and table 'test'" + end + test "monitored pid stopping brings down ReplicationConnection ", %{tenant: tenant} do monitored_pid = spawn(fn -> @@ -204,7 +393,32 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do assert logs =~ "UnableToBroadcastChanges" end - test "payload without id", %{tenant: tenant} do 
+ test "message that exceeds payload size logs error", %{tenant: tenant} do + logs = + capture_log(fn -> + start_supervised!( + {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, + restart: :transient + ) + + topic = random_string() + tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, false) + assert :ok = Endpoint.subscribe(tenant_topic) + + message_fixture(tenant, %{ + "event" => random_string(), + "topic" => random_string(), + "private" => true, + "payload" => %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)} + }) + + refute_receive %Phoenix.Socket.Broadcast{}, 500 + end) + + assert logs =~ "UnableToBroadcastChanges: %{messages: [%{payload: [\"Payload size exceeds tenant limit\"]}]}" + end + + test "payload without id", %{tenant: tenant, db_conn: db_conn} do start_link_supervised!( {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, restart: :transient @@ -214,33 +428,39 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, false) subscribe(tenant_topic, topic) - fixture = - message_fixture(tenant, %{ - "topic" => topic, - "private" => true, - "event" => "INSERT", - "payload" => %{"value" => "something"} - }) + value = "something" + event = "INSERT" + + Postgrex.query!( + db_conn, + "SELECT realtime.send (json_build_object ('value', $1 :: text)::jsonb, $2 :: text, $3 :: text, TRUE::bool);", + [value, event, topic] + ) + + {:ok, [%{id: id}]} = Repo.all(db_conn, from(m in Message), Message) assert_receive {:socket_push, :text, data}, 500 message = data |> IO.iodata_to_binary() |> Jason.decode!() assert %{ "event" => "broadcast", - "payload" => %{"event" => "INSERT", "payload" => payload, "type" => "broadcast"}, + "payload" => %{ + "event" => "INSERT", + "meta" => %{"id" => ^id}, + "payload" => payload, + "type" => "broadcast" + }, "ref" => nil, "topic" => ^topic } = 
message - id = fixture.id - assert payload == %{ "value" => "something", "id" => id } end - test "payload including id", %{tenant: tenant} do + test "payload including id", %{tenant: tenant, db_conn: db_conn} do start_link_supervised!( {ReplicationConnection, %ReplicationConnection{tenant_id: tenant.external_id, monitored_pid: self()}}, restart: :transient @@ -250,21 +470,29 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do tenant_topic = Tenants.tenant_topic(tenant.external_id, topic, false) subscribe(tenant_topic, topic) - payload = %{"value" => "something", "id" => "123456"} + id = "123456" + value = "something" + event = "INSERT" - message_fixture(tenant, %{ - "topic" => topic, - "private" => true, - "event" => "INSERT", - "payload" => payload - }) + Postgrex.query!( + db_conn, + "SELECT realtime.send (json_build_object ('value', $1 :: text, 'id', $2 :: text)::jsonb, $3 :: text, $4 :: text, TRUE::bool);", + [value, id, event, topic] + ) + + {:ok, [%{id: message_id}]} = Repo.all(db_conn, from(m in Message), Message) assert_receive {:socket_push, :text, data}, 500 message = data |> IO.iodata_to_binary() |> Jason.decode!() assert %{ "event" => "broadcast", - "payload" => %{"event" => "INSERT", "payload" => ^payload, "type" => "broadcast"}, + "payload" => %{ + "meta" => %{"id" => ^message_id}, + "event" => "INSERT", + "payload" => %{"value" => "something", "id" => ^id}, + "type" => "broadcast" + }, "ref" => nil, "topic" => ^topic } = message @@ -272,7 +500,7 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do test "fails on existing replication slot", %{tenant: tenant} do {:ok, db_conn} = Database.connect(tenant, "realtime_test", :stop) - name = "supabase_realtime_messages_replication_slot_test" + name = @replication_slot_name Postgrex.query!(db_conn, "SELECT pg_create_logical_replication_slot($1, 'test_decoding')", [name]) @@ -331,6 +559,118 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do assert {:error, :max_wal_senders_reached} = 
ReplicationConnection.start(tenant, self()) end + + test "handles WAL pressure gracefully", %{tenant: tenant} do + {:ok, replication_pid} = ReplicationConnection.start(tenant, self()) + + {:ok, conn} = Database.connect(tenant, "realtime_test", :stop) + on_exit(fn -> Process.exit(conn, :normal) end) + + large_payload = String.duplicate("x", 10 * 1024 * 1024) + + for i <- 1..5 do + message_fixture_with_conn(tenant, conn, %{ + "topic" => "stress_#{i}", + "private" => true, + "event" => "INSERT", + "payload" => %{"data" => large_payload} + }) + end + + assert Process.alive?(replication_pid) + end + end + + describe "publication validation steps" do + test "if proper tables are included, starts replication", %{tenant: tenant, db_conn: db_conn} do + publication_name = "supabase_realtime_messages_publication" + + Postgrex.query!(db_conn, "DROP PUBLICATION IF EXISTS #{publication_name}", []) + Postgrex.query!(db_conn, "CREATE PUBLICATION #{publication_name} FOR TABLE realtime.messages", []) + + logs = + capture_log(fn -> + {:ok, pid} = ReplicationConnection.start(tenant, self()) + + assert_replication_started(db_conn, @replication_slot_name) + assert Process.alive?(pid) + assert_publication_contains_only_messages(db_conn, publication_name) + + Process.exit(pid, :shutdown) + end) + + refute logs =~ "Recreating" + end + + test "if includes unexpected tables, recreates publication", %{tenant: tenant, db_conn: db_conn} do + publication_name = "supabase_realtime_messages_publication" + + Postgrex.query!(db_conn, "DROP PUBLICATION IF EXISTS #{publication_name}", []) + Postgrex.query!(db_conn, "CREATE TABLE IF NOT EXISTS public.wrong_table (id int)", []) + Postgrex.query!(db_conn, "CREATE PUBLICATION #{publication_name} FOR TABLE public.wrong_table", []) + + logs = + capture_log(fn -> + {:ok, pid} = ReplicationConnection.start(tenant, self()) + + assert_replication_started(db_conn, @replication_slot_name) + assert Process.alive?(pid) + 
assert_publication_contains_only_messages(db_conn, publication_name) + + Process.exit(pid, :shutdown) + end) + + assert logs =~ "Recreating" + end + + test "recreates publication if it has no tables", %{tenant: tenant, db_conn: db_conn} do + publication_name = "supabase_realtime_messages_publication" + + Postgrex.query!(db_conn, "DROP PUBLICATION IF EXISTS #{publication_name}", []) + Postgrex.query!(db_conn, "CREATE PUBLICATION #{publication_name}", []) + + logs = + capture_log(fn -> + {:ok, pid} = ReplicationConnection.start(tenant, self()) + + assert_replication_started(db_conn, @replication_slot_name) + assert Process.alive?(pid) + assert_publication_contains_only_messages(db_conn, publication_name) + + Process.exit(pid, :shutdown) + end) + + assert logs =~ "Recreating" + end + + test "recreates publication if it has expected tables and unexpected tables under same publication", %{ + tenant: tenant, + db_conn: db_conn + } do + publication_name = "supabase_realtime_messages_publication" + + Postgrex.query!(db_conn, "DROP PUBLICATION IF EXISTS #{publication_name}", []) + Postgrex.query!(db_conn, "CREATE TABLE IF NOT EXISTS public.extra_table (id int)", []) + + Postgrex.query!( + db_conn, + "CREATE PUBLICATION #{publication_name} FOR TABLE realtime.messages, public.extra_table", + [] + ) + + logs = + capture_log(fn -> + {:ok, pid} = ReplicationConnection.start(tenant, self()) + + assert_replication_started(db_conn, @replication_slot_name) + assert Process.alive?(pid) + assert_publication_contains_only_messages(db_conn, publication_name) + + Process.exit(pid, :shutdown) + end) + + assert logs =~ "Recreating" + end end describe "whereis/1" do @@ -378,7 +718,7 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do "payload" => %{"value" => random_string()} }) - assert_receive {:socket_push, :text, data} + assert_receive {:socket_push, :text, data}, 500 message = data |> IO.iodata_to_binary() |> Jason.decode!() assert %{"event" => "broadcast", "payload" => _, "ref" 
=> nil, "topic" => ^topic} = message @@ -409,4 +749,59 @@ defmodule Realtime.Tenants.ReplicationConnectionTest do ref = Process.monitor(pid) assert_receive {:DOWN, ^ref, :process, ^pid, _reason}, timeout end + + defp message_fixture_with_conn(_tenant, conn, override) do + create_attrs = %{ + "topic" => random_string(), + "extension" => "broadcast" + } + + override = override |> Enum.map(fn {k, v} -> {"#{k}", v} end) |> Map.new() + + {:ok, message} = + create_attrs + |> Map.merge(override) + |> TenantConnection.create_message(conn) + + message + end + + defp assert_publication_contains_only_messages(db_conn, publication_name) do + %{rows: rows} = + Postgrex.query!( + db_conn, + "SELECT schemaname, tablename FROM pg_publication_tables WHERE pubname = $1", + [publication_name] + ) + + valid_tables = + Enum.all?(rows, fn [schema, table] -> + schema == "realtime" and (table == "messages" or String.starts_with?(table, "messages_")) + end) + + assert valid_tables, "Expected only realtime.messages or its partitions, got: #{inspect(rows)}" + end + + defp assert_replication_started(db_conn, slot_name, retries \\ 10, interval_ms \\ 10) do + case check_replication_status(db_conn, slot_name, retries, interval_ms) do + :ok -> :ok + :error -> flunk("Replication slot #{slot_name} did not become active") + end + end + + defp check_replication_status(_db_conn, _slot_name, 0, _interval_ms), do: :error + + defp check_replication_status(db_conn, slot_name, retries_remaining, interval_ms) do + %{rows: rows} = + Postgrex.query!(db_conn, "SELECT active FROM pg_replication_slots WHERE slot_name = $1", [slot_name]) + + case rows do + [[true]] -> + :ok + + _ -> + Process.sleep(interval_ms) + check_replication_status(db_conn, slot_name, retries_remaining - 1, interval_ms) + end + end end diff --git a/test/realtime/repo_test.exs b/test/realtime/tenants/repo_test.exs similarity index 99% rename from test/realtime/repo_test.exs rename to test/realtime/tenants/repo_test.exs index 
7d6841b01..697274494 100644 --- a/test/realtime/repo_test.exs +++ b/test/realtime/tenants/repo_test.exs @@ -1,10 +1,10 @@ -defmodule Realtime.RepoTest do +defmodule Realtime.Tenants.RepoTest do use Realtime.DataCase, async: true import Ecto.Query alias Realtime.Api.Message - alias Realtime.Repo + alias Realtime.Tenants.Repo alias Realtime.Database setup do diff --git a/test/realtime/tenants_test.exs b/test/realtime/tenants_test.exs index aefe0b86c..202facdb5 100644 --- a/test/realtime/tenants_test.exs +++ b/test/realtime/tenants_test.exs @@ -89,15 +89,6 @@ defmodule Realtime.TenantsTest do end end - describe "update_migrations_ran/1" do - test "updates migrations_ran to the count of all migrations" do - tenant = tenant_fixture(%{migrations_ran: 0}) - Tenants.update_migrations_ran(tenant.external_id, 1) - tenant = Repo.reload!(tenant) - assert tenant.migrations_ran == 1 - end - end - describe "broadcast_operation_event/2" do setup do tenant = tenant_fixture() diff --git a/test/realtime/user_counter_test.exs b/test/realtime/user_counter_test.exs index d93529764..f7725885d 100644 --- a/test/realtime/user_counter_test.exs +++ b/test/realtime/user_counter_test.exs @@ -3,6 +3,13 @@ defmodule Realtime.UsersCounterTest do alias Realtime.UsersCounter alias Realtime.Rpc + setup_all do + tenant_id = random_string() + count = generate_load(tenant_id) + + %{tenant_id: tenant_id, count: count, nodes: Node.list()} + end + describe "add/1" do test "starts counter for tenant" do assert UsersCounter.add(self(), random_string()) == :ok @@ -11,45 +18,111 @@ defmodule Realtime.UsersCounterTest do @aux_mod (quote do defmodule Aux do - def ping(), - do: - spawn(fn -> - Process.sleep(3000) - :pong - end) + def ping() do + spawn(fn -> Process.sleep(:infinity) end) + end + + def join(pid, group) do + UsersCounter.add(pid, group) + end end end) Code.eval_quoted(@aux_mod) + describe "tenant_counts/0" do + test "map of tenant and number of users", %{tenant_id: tenant_id, count: expected} do + 
assert UsersCounter.add(self(), tenant_id) == :ok + Process.sleep(1000) + counts = UsersCounter.tenant_counts() + + assert counts[tenant_id] == expected + 1 + assert map_size(counts) >= 61 + + counts = Beacon.local_member_counts(:users) + + assert counts[tenant_id] == 1 + assert map_size(counts) >= 1 + + counts = Beacon.member_counts(:users) + + assert counts[tenant_id] == expected + 1 + assert map_size(counts) >= 61 + end + end + + describe "tenant_counts/1" do + test "map of tenant and number of users for a node only", %{tenant_id: tenant_id, nodes: nodes} do + assert UsersCounter.add(self(), tenant_id) == :ok + Process.sleep(1000) + my_counts = UsersCounter.tenant_counts(Node.self()) + # Only one connection from this test process on this node + assert my_counts == %{tenant_id => 1} + + another_node_counts = UsersCounter.tenant_counts(hd(nodes)) + assert another_node_counts[tenant_id] == 2 + + assert map_size(another_node_counts) == 21 + + assert Beacon.local_member_counts(:users) == %{tenant_id => 1} + end + end + describe "tenant_users/1" do - test "returns count of connected clients for tenant on cluster node" do - tenant_id = random_string() - expected = generate_load(tenant_id) + test "returns count of connected clients for tenant on cluster node", %{tenant_id: tenant_id, count: expected} do Process.sleep(1000) assert UsersCounter.tenant_users(tenant_id) == expected end end describe "tenant_users/2" do - test "returns count of connected clients for tenant on target cluster" do - tenant_id = random_string() - generate_load(tenant_id) - {:ok, node} = Clustered.start(@aux_mod) - pid = Rpc.call(node, Aux, :ping, []) - UsersCounter.add(pid, tenant_id) - assert UsersCounter.tenant_users(node, tenant_id) == 1 + test "returns count of connected clients for tenant on target cluster", %{tenant_id: tenant_id, nodes: nodes} do + node = hd(nodes) + assert UsersCounter.tenant_users(node, tenant_id) == 2 + + assert Beacon.member_count(:users, tenant_id, node) == 2 end end 
- defp generate_load(tenant_id, nodes \\ 2, processes \\ 2) do - for i <- 1..nodes do + defp generate_load(tenant_id) do + processes = 2 + + nodes = %{ + :"main@127.0.0.1" => 5969, + :"us_node@127.0.0.1" => 16980, + :"ap2_nodeX@127.0.0.1" => 16981, + :"ap2_nodeY@127.0.0.1" => 16982 + } + + regions = %{ + :"us_node@127.0.0.1" => "us-east-1", + :"ap2_nodeX@127.0.0.1" => "ap-southeast-2", + :"ap2_nodeY@127.0.0.1" => "ap-southeast-2" + } + + on_exit(fn -> Application.put_env(:gen_rpc, :client_config_per_node, {:internal, %{}}) end) + Application.put_env(:gen_rpc, :client_config_per_node, {:internal, nodes}) + + nodes + |> Enum.filter(fn {node, _port} -> node != Node.self() end) + |> Enum.with_index(1) + |> Enum.each(fn {{node, gen_rpc_port}, i} -> # Avoid port collision extra_config = [ - {:gen_rpc, :tcp_server_port, 15970 + i} + {:gen_rpc, :tcp_server_port, gen_rpc_port}, + {:gen_rpc, :client_config_per_node, {:internal, nodes}}, + {:realtime, :users_scope_broadcast_interval_in_ms, 100}, + {:realtime, :region, regions[node]} ] - {:ok, node} = Clustered.start(@aux_mod, extra_config: extra_config, phoenix_port: 4012 + i) + node_name = + node + |> to_string() + |> String.split("@") + |> hd() + |> String.to_atom() + + {:ok, node} = Clustered.start(@aux_mod, name: node_name, extra_config: extra_config, phoenix_port: 4012 + i) for _ <- 1..processes do pid = Rpc.call(node, Aux, :ping, []) @@ -57,18 +130,17 @@ defmodule Realtime.UsersCounterTest do for _ <- 1..10 do # replicate same pid added multiple times concurrently Task.start(fn -> - UsersCounter.add(pid, tenant_id) + Rpc.call(node, Aux, :join, [pid, tenant_id]) end) # noisy neighbors to test handling of bigger loads on concurrent calls Task.start(fn -> - pid = Rpc.call(node, Aux, :ping, []) - UsersCounter.add(pid, random_string()) + Rpc.call(node, Aux, :join, [pid, random_string()]) end) end end - end + end) - nodes * processes + 3 * processes end end diff --git a/test/realtime_web/channels/payloads/join_test.exs 
b/test/realtime_web/channels/payloads/join_test.exs index 32bf1b397..f02c2a73d 100644 --- a/test/realtime_web/channels/payloads/join_test.exs +++ b/test/realtime_web/channels/payloads/join_test.exs @@ -6,6 +6,7 @@ defmodule RealtimeWeb.Channels.Payloads.JoinTest do alias RealtimeWeb.Channels.Payloads.Join alias RealtimeWeb.Channels.Payloads.Config alias RealtimeWeb.Channels.Payloads.Broadcast + alias RealtimeWeb.Channels.Payloads.Broadcast.Replay alias RealtimeWeb.Channels.Payloads.Presence alias RealtimeWeb.Channels.Payloads.PostgresChange @@ -17,7 +18,7 @@ defmodule RealtimeWeb.Channels.Payloads.JoinTest do config = %{ "config" => %{ "private" => false, - "broadcast" => %{"ack" => false, "self" => false}, + "broadcast" => %{"ack" => false, "self" => false, "replay" => %{"since" => 1, "limit" => 10}}, "presence" => %{"enabled" => true, "key" => key}, "postgres_changes" => [ %{"event" => "INSERT", "schema" => "public", "table" => "users", "filter" => "id=eq.1"}, @@ -37,8 +38,9 @@ defmodule RealtimeWeb.Channels.Payloads.JoinTest do postgres_changes: postgres_changes } = config - assert %Broadcast{ack: false, self: false} = broadcast + assert %Broadcast{ack: false, self: false, replay: replay} = broadcast assert %Presence{enabled: true, key: ^key} = presence + assert %Replay{since: 1, limit: 10} = replay assert [ %PostgresChange{event: "INSERT", schema: "public", table: "users", filter: "id=eq.1"}, @@ -56,6 +58,25 @@ defmodule RealtimeWeb.Channels.Payloads.JoinTest do assert is_binary(key) end + test "presence key can be number" do + config = %{"config" => %{"presence" => %{"enabled" => true, "key" => 123}}} + + assert {:ok, %Join{config: %Config{presence: %Presence{key: key}}}} = Join.validate(config) + + assert key == 123 + end + + test "invalid replay" do + config = %{"config" => %{"broadcast" => %{"replay" => 123}}} + + assert { + :error, + :invalid_join_payload, + %{config: %{broadcast: %{replay: ["unable to parse, expected a map"]}}} + } = + 
Join.validate(config) + end + test "missing enabled presence defaults to true" do config = %{"config" => %{"presence" => %{}}} @@ -92,5 +113,11 @@ defmodule RealtimeWeb.Channels.Payloads.JoinTest do user_token: ["unable to parse, expected string"] } end + + test "handles postgres changes with nil value in array as empty array" do + config = %{"config" => %{"postgres_changes" => [nil]}} + + assert {:ok, %Join{config: %Config{postgres_changes: []}}} = Join.validate(config) + end end end diff --git a/test/realtime_web/channels/realtime_channel/broadcast_handler_test.exs b/test/realtime_web/channels/realtime_channel/broadcast_handler_test.exs index 2cd7005df..b2aa9b90e 100644 --- a/test/realtime_web/channels/realtime_channel/broadcast_handler_test.exs +++ b/test/realtime_web/channels/realtime_channel/broadcast_handler_test.exs @@ -1,5 +1,8 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do - use Realtime.DataCase, async: true + use Realtime.DataCase, + async: true, + parameterize: [%{serializer: Phoenix.Socket.V1.JSONSerializer}, %{serializer: RealtimeWeb.Socket.V2Serializer}] + use Mimic import Generators @@ -17,26 +20,27 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do setup [:initiate_tenant] + @payload %{"a" => "b"} + describe "handle/3" do - test "with write true policy, user is able to send message", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "with write true policy, user is able to send message", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic, policies: %Policies{broadcast: %BroadcastPolicies{write: true}}) for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end - Process.sleep(120) - for _ <- 1..100 do topic = "realtime:#{topic}" assert_receive {:socket_push, :text, data} - message = data 
|> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + + assert Jason.decode!(data) == message(serializer, topic, @payload) end - {:ok, %{avg: avg, bucket: buckets}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg, bucket: buckets}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert Enum.sum(buckets) == 100 assert avg > 0 end @@ -50,40 +54,37 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do socket end - Process.sleep(120) - refute_received _any - {:ok, %{avg: avg}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert avg == 0.0 end @tag policies: [:authenticated_read_broadcast, :authenticated_write_broadcast] - test "with nil policy but valid user, is able to send message", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "with nil policy but valid user, is able to send message", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic) for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end - Process.sleep(120) - for _ <- 1..100 do topic = "realtime:#{topic}" assert_received {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end - {:ok, %{avg: avg, bucket: buckets}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg, bucket: buckets}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert Enum.sum(buckets) == 100 assert avg > 
0.0 end @tag policies: [:authenticated_read_matching_user_sub, :authenticated_write_matching_user_sub], sub: UUID.generate() - test "with valid sub, is able to send message", %{topic: topic, tenant: tenant, db_conn: db_conn, sub: sub} do + test "with valid sub, is able to send message", + %{topic: topic, tenant: tenant, db_conn: db_conn, sub: sub, serializer: serializer} do socket = socket_fixture(tenant, topic, policies: %Policies{broadcast: %BroadcastPolicies{write: nil, read: true}}, @@ -92,17 +93,14 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end - Process.sleep(120) - for _ <- 1..100 do topic = "realtime:#{topic}" assert_received {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end end @@ -120,13 +118,12 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do socket end - Process.sleep(120) - - refute_received {:socket_push, :text, _} + refute_receive {:socket_push, :text, _}, 120 end @tag policies: [:read_matching_user_role, :write_matching_user_role], role: "anon" - test "with valid role, is able to send message", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "with valid role, is able to send message", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic, policies: %Policies{broadcast: %BroadcastPolicies{write: nil, read: true}}, @@ -135,17 +132,14 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, 
socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end - Process.sleep(120) - for _ <- 1..100 do topic = "realtime:#{topic}" assert_received {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end end @@ -163,9 +157,7 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do socket end - Process.sleep(120) - - refute_received {:socket_push, :text, _} + refute_receive {:socket_push, :text, _}, 120 end test "with nil policy and invalid user, won't send message", %{topic: topic, tenant: tenant, db_conn: db_conn} do @@ -177,16 +169,15 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do socket end - Process.sleep(120) - refute_received _any - {:ok, %{avg: avg}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert avg == 0.0 end @tag policies: [:authenticated_read_broadcast, :authenticated_write_broadcast] - test "validation only runs once on nil and valid policies", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "validation only runs once on nil and valid policies", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic) expect(Authorization, :get_write_authorizations, 1, fn conn, db_conn, auth_context -> @@ -197,15 +188,14 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end for _ <- 1..100 do topic = "realtime:#{topic}" assert_receive {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> 
Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end end @@ -222,12 +212,10 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do socket end - Process.sleep(100) - - refute_received _ + refute_receive _, 100 end - test "no ack still sends message", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "no ack still sends message", %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic, policies: %Policies{broadcast: %BroadcastPolicies{write: true}}, @@ -236,7 +224,7 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do for _ <- 1..100, reduce: socket do socket -> - {:noreply, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:noreply, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end @@ -245,56 +233,128 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do for _ <- 1..100 do topic = "realtime:#{topic}" assert_received {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end end - test "public channels are able to send messages", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "public channels are able to send messages", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic, private?: false, policies: nil) for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end - Process.sleep(120) - for _ <- 1..100 do topic = "realtime:#{topic}" assert_received 
{:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end - {:ok, %{avg: avg, bucket: buckets}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg, bucket: buckets}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert Enum.sum(buckets) == 100 assert avg > 0.0 end - test "public channels are able to send messages and ack", %{topic: topic, tenant: tenant, db_conn: db_conn} do + test "public channels are able to send messages and ack", + %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do socket = socket_fixture(tenant, topic, private?: false, policies: nil) for _ <- 1..100, reduce: socket do socket -> - {:reply, :ok, socket} = BroadcastHandler.handle(%{"a" => "b"}, db_conn, socket) + {:reply, :ok, socket} = BroadcastHandler.handle(@payload, db_conn, socket) socket end for _ <- 1..100 do topic = "realtime:#{topic}" assert_receive {:socket_push, :text, data} - message = data |> IO.iodata_to_binary() |> Jason.decode!() - assert message == %{"event" => "broadcast", "payload" => %{"a" => "b"}, "ref" => nil, "topic" => topic} + assert Jason.decode!(data) == message(serializer, topic, @payload) end - Process.sleep(120) - {:ok, %{avg: avg, bucket: buckets}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, %{avg: avg, bucket: buckets}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert Enum.sum(buckets) == 100 assert avg > 0.0 end + test "V2 json UserBroadcastPush", %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do + socket = socket_fixture(tenant, topic, private?: false, policies: nil) + + user_broadcast_payload = %{"a" => "b"} + json_encoded_user_broadcast_payload = Jason.encode!(user_broadcast_payload) + + {:reply, :ok, _socket} = 
+ BroadcastHandler.handle({"event123", :json, json_encoded_user_broadcast_payload, %{}}, db_conn, socket) + + topic = "realtime:#{topic}" + assert_receive {:socket_push, code, data} + + if serializer == RealtimeWeb.Socket.V2Serializer do + assert code == :binary + + assert data == + << + # user broadcast = 4 + 4::size(8), + # topic_size + byte_size(topic), + # user_event_size + byte_size("event123"), + # metadata_size + 0, + # json encoding + 1::size(8), + topic::binary, + "event123" + >> <> json_encoded_user_broadcast_payload + else + assert code == :text + + assert Jason.decode!(data) == + message(serializer, topic, %{ + "event" => "event123", + "payload" => user_broadcast_payload, + "type" => "broadcast" + }) + end + end + + test "V2 binary UserBroadcastPush", %{topic: topic, tenant: tenant, db_conn: db_conn, serializer: serializer} do + socket = socket_fixture(tenant, topic, private?: false, policies: nil) + + user_broadcast_payload = <<123, 456, 789>> + + {:reply, :ok, _socket} = + BroadcastHandler.handle({"event123", :binary, user_broadcast_payload, %{}}, db_conn, socket) + + topic = "realtime:#{topic}" + + if serializer == RealtimeWeb.Socket.V2Serializer do + assert_receive {:socket_push, :binary, data} + + assert data == + << + # user broadcast = 4 + 4::size(8), + # topic_size + byte_size(topic), + # user_event_size + byte_size("event123"), + # metadata_size + 0, + # binary encoding + 0::size(8), + topic::binary, + "event123" + >> <> user_broadcast_payload + else + # Can't receive binary payloads on V1 serializer + refute_receive {:socket_push, _code, _data} + end + end + @tag policies: [:broken_write_presence] test "handle failing rls policy", %{topic: topic, tenant: tenant, db_conn: db_conn} do socket = socket_fixture(tenant, topic) @@ -303,14 +363,81 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do capture_log(fn -> {:noreply, _socket} = BroadcastHandler.handle(%{}, db_conn, socket) - # Enough for the RateCounter to calculate the last 
bucket - refute_received _, 1200 + {:ok, %{avg: avg}} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) + assert avg == 0.0 + + refute_receive _, 200 end) assert log =~ "RlsPolicyError" + end - {:ok, %{avg: avg}} = RateCounter.get(Tenants.events_per_second_rate(tenant)) - assert avg == 0.0 + test "handle payload size excedding limits in private channels", %{topic: topic, tenant: tenant, db_conn: db_conn} do + socket = + socket_fixture(tenant, topic, + policies: %Policies{broadcast: %BroadcastPolicies{write: true}}, + ack_broadcast: false + ) + + assert {:noreply, _} = + BroadcastHandler.handle( + %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)}, + db_conn, + socket + ) + + refute_receive {:socket_push, :text, _}, 120 + end + + test "handle payload size excedding limits in public channels", %{topic: topic, tenant: tenant, db_conn: db_conn} do + socket = socket_fixture(tenant, topic, ack_broadcast: false, private?: false) + + assert {:noreply, _} = + BroadcastHandler.handle( + %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)}, + db_conn, + socket + ) + + refute_receive {:socket_push, :text, _}, 120 + end + + test "handle payload size excedding limits in private channel and if ack it will receive error", %{ + topic: topic, + tenant: tenant, + db_conn: db_conn + } do + socket = + socket_fixture(tenant, topic, + policies: %Policies{broadcast: %BroadcastPolicies{write: true}}, + ack_broadcast: true + ) + + assert {:reply, {:error, :payload_size_exceeded}, _} = + BroadcastHandler.handle( + %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)}, + db_conn, + socket + ) + + refute_receive {:socket_push, :text, _}, 120 + end + + test "handle payload size excedding limits in public channels and if ack it will receive error", %{ + topic: topic, + tenant: tenant, + db_conn: db_conn + } do + socket = socket_fixture(tenant, topic, ack_broadcast: true, private?: false) + + assert {:reply, {:error, 
:payload_size_exceeded}, _} = + BroadcastHandler.handle( + %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 1)}, + db_conn, + socket + ) + + refute_receive {:socket_push, :text, _}, 120 end end @@ -318,7 +445,7 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) rate = Tenants.events_per_second_rate(tenant) RateCounter.new(rate, tick: 100) @@ -331,7 +458,7 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do fastlane = RealtimeWeb.RealtimeChannel.MessageDispatcher.fastlane_metadata( self(), - Phoenix.Socket.V1.JSONSerializer, + context.serializer, "realtime:#{topic}", :warning, "tenant_id" @@ -389,4 +516,10 @@ defmodule RealtimeWeb.RealtimeChannel.BroadcastHandlerTest do } } end + + defp message(RealtimeWeb.Socket.V2Serializer, topic, payload), do: [nil, nil, topic, "broadcast", payload] + + defp message(Phoenix.Socket.V1.JSONSerializer, topic, payload) do + %{"event" => "broadcast", "payload" => payload, "ref" => nil, "topic" => topic} + end end diff --git a/test/realtime_web/channels/realtime_channel/logging_test.exs b/test/realtime_web/channels/realtime_channel/logging_test.exs index 92634daef..cd131d16e 100644 --- a/test/realtime_web/channels/realtime_channel/logging_test.exs +++ b/test/realtime_web/channels/realtime_channel/logging_test.exs @@ -37,6 +37,7 @@ defmodule RealtimeWeb.RealtimeChannel.LoggingTest do assert log =~ "sub=#{sub}" assert log =~ "exp=#{exp}" assert log =~ "iss=#{iss}" + assert log =~ "error_code=TestError" end end @@ -57,6 +58,7 @@ defmodule RealtimeWeb.RealtimeChannel.LoggingTest do assert log =~ "sub=#{sub}" assert log =~ "exp=#{exp}" assert log =~ "iss=#{iss}" + assert log =~ "error_code=TestWarning" end end @@ -67,10 
+69,14 @@ defmodule RealtimeWeb.RealtimeChannel.LoggingTest do for log_level <- log_levels do socket = %{assigns: %{log_level: log_level, tenant: random_string(), access_token: "test_token"}} - assert capture_log(fn -> - assert Logging.maybe_log_error(socket, "TestCode", "test message") == - {:error, %{reason: "TestCode: test message"}} - end) =~ "TestCode: test message" + log = + capture_log(fn -> + assert Logging.maybe_log_error(socket, "TestCode", "test message") == + {:error, %{reason: "TestCode: test message"}} + end) + + assert log =~ "TestCode: test message" + assert log =~ "error_code=TestCode" assert capture_log(fn -> assert Logging.maybe_log_error(socket, "TestCode", %{a: "b"}) == @@ -103,11 +109,14 @@ defmodule RealtimeWeb.RealtimeChannel.LoggingTest do for log_level <- log_levels do socket = %{assigns: %{log_level: log_level, tenant: random_string(), access_token: "test_token"}} - assert capture_log(fn -> - assert Logging.maybe_log_warning(socket, "TestCode", "test message") == - {:error, %{reason: "TestCode: test message"}} - end) =~ - "TestCode: test message" + log = + capture_log(fn -> + assert Logging.maybe_log_warning(socket, "TestCode", "test message") == + {:error, %{reason: "TestCode: test message"}} + end) + + assert log =~ "TestCode: test message" + assert log =~ "error_code=TestCode" assert capture_log(fn -> assert Logging.maybe_log_warning(socket, "TestCode", %{a: "b"}) == diff --git a/test/realtime_web/channels/realtime_channel/message_dispatcher_test.exs b/test/realtime_web/channels/realtime_channel/message_dispatcher_test.exs index 7a9e2eb25..834cf7ad8 100644 --- a/test/realtime_web/channels/realtime_channel/message_dispatcher_test.exs +++ b/test/realtime_web/channels/realtime_channel/message_dispatcher_test.exs @@ -4,7 +4,10 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcherTest do import ExUnit.CaptureLog alias Phoenix.Socket.Broadcast + alias Phoenix.Socket.V1 alias RealtimeWeb.RealtimeChannel.MessageDispatcher + alias 
RealtimeWeb.Socket.UserBroadcast + alias RealtimeWeb.Socket.V2Serializer defmodule TestSerializer do def fastlane!(msg) do @@ -16,18 +19,35 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcherTest do describe "fastlane_metadata/5" do test "info level" do assert MessageDispatcher.fastlane_metadata(self(), Serializer, "realtime:topic", :info, "tenant_id") == - {:realtime_channel_fastlane, self(), Serializer, "realtime:topic", {:log, "tenant_id"}} + {:rc_fastlane, self(), Serializer, "realtime:topic", :info, "tenant_id", MapSet.new()} end test "non-info level" do assert MessageDispatcher.fastlane_metadata(self(), Serializer, "realtime:topic", :warning, "tenant_id") == - {:realtime_channel_fastlane, self(), Serializer, "realtime:topic"} + {:rc_fastlane, self(), Serializer, "realtime:topic", :warning, "tenant_id", MapSet.new()} + end + + test "replayed message ids" do + assert MessageDispatcher.fastlane_metadata( + self(), + Serializer, + "realtime:topic", + :warning, + "tenant_id", + MapSet.new([1]) + ) == + {:rc_fastlane, self(), Serializer, "realtime:topic", :warning, "tenant_id", MapSet.new([1])} end end describe "dispatch/3" do setup do - {:ok, _pid} = Agent.start_link(fn -> 0 end, name: TestSerializer) + {:ok, _pid} = + start_supervised(%{ + id: TestSerializer, + start: {Agent, :start_link, [fn -> 0 end, [name: TestSerializer]]} + }) + :ok end @@ -50,12 +70,11 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcherTest do from_pid = :erlang.list_to_pid(~c'<0.2.1>') subscribers = [ - {subscriber_pid, {:realtime_channel_fastlane, self(), TestSerializer, "realtime:topic", {:log, "tenant123"}}}, - {subscriber_pid, {:realtime_channel_fastlane, self(), TestSerializer, "realtime:topic"}} + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :warning, "tenant123", MapSet.new()}} ] msg = %Broadcast{topic: "some:other:topic", event: 
"event", payload: %{data: "test"}} - require Logger log = capture_log(fn -> @@ -75,6 +94,130 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcherTest do refute_receive _any end + test "dispatches 'presence_diff' messages to fastlane subscribers" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + + subscribers = [ + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :info, "tenant456", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :warning, "tenant456", MapSet.new()}} + ] + + msg = %Broadcast{topic: "some:other:topic", event: "presence_diff", payload: %{data: "test"}} + + log = + capture_log(fn -> + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + end) + + assert log =~ "Received message on realtime:topic with payload: #{inspect(msg, pretty: true)}" + + assert_receive {:encoded, %Broadcast{event: "presence_diff", payload: %{data: "test"}, topic: "realtime:topic"}} + assert_receive {:encoded, %Broadcast{event: "presence_diff", payload: %{data: "test"}, topic: "realtime:topic"}} + + assert Agent.get(TestSerializer, & &1) == 1 + + assert Realtime.GenCounter.get(Realtime.Tenants.presence_events_per_second_key("tenant456")) == 2 + + refute_receive _any + end + + test "does not dispatch messages to fastlane subscribers if they already replayed it" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + replaeyd_message_ids = MapSet.new(["123"]) + + subscribers = [ + {subscriber_pid, + {:rc_fastlane, self(), TestSerializer, "realtime:topic", :info, "tenant123", replaeyd_message_ids}}, + {subscriber_pid, + {:rc_fastlane, self(), 
TestSerializer, "realtime:topic", :warning, "tenant123", replaeyd_message_ids}} + ] + + msg = %Broadcast{ + topic: "some:other:topic", + event: "event", + payload: %{"data" => "test", "meta" => %{"id" => "123"}} + } + + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + + assert Agent.get(TestSerializer, & &1) == 0 + + refute_receive _any + end + + test "payload is not a map" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + + subscribers = [ + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), TestSerializer, "realtime:topic", :warning, "tenant123", MapSet.new()}} + ] + + msg = %Broadcast{topic: "some:other:topic", event: "event", payload: "not a map"} + + log = + capture_log(fn -> + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + end) + + assert log =~ "Received message on realtime:topic with payload: #{inspect(msg, pretty: true)}" + + assert_receive {:encoded, %Broadcast{event: "event", payload: "not a map", topic: "realtime:topic"}} + assert_receive {:encoded, %Broadcast{event: "event", payload: "not a map", topic: "realtime:topic"}} + + assert Agent.get(TestSerializer, & &1) == 1 + + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + + refute_receive _any + end + test "dispatches messages to non fastlane subscribers" do from_pid = :erlang.list_to_pid(~c'<0.2.1>') @@ -93,5 +236,236 @@ defmodule RealtimeWeb.RealtimeChannel.MessageDispatcherTest do # TestSerializer is not called assert Agent.get(TestSerializer, & &1) == 0 end + + test "dispatches Broadcast to V1 & V2 Serializers" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + 
send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + + subscribers = [ + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}} + ] + + msg = %Broadcast{topic: "some:other:topic", event: "event", payload: %{data: "test"}} + + log = + capture_log(fn -> + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + end) + + assert log =~ "Received message on realtime:topic with payload: #{inspect(msg, pretty: true)}" + + # Receive 2 messages using V1 + assert_receive {:socket_push, :text, message_v1} + assert_receive {:socket_push, :text, ^message_v1} + + assert Jason.decode!(message_v1) == %{ + "event" => "event", + "payload" => %{"data" => "test"}, + "ref" => nil, + "topic" => "realtime:topic" + } + + # Receive 2 messages using V2 + assert_receive {:socket_push, :text, message_v2} + assert_receive {:socket_push, :text, ^message_v2} + + # V2 is an array format + assert Jason.decode!(message_v2) == [nil, nil, "realtime:topic", "event", %{"data" => "test"}] + + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + + refute_receive _any + end + + test "dispatches json UserBroadcast to V1 & V2 Serializers" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + + 
subscribers = [ + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}} + ] + + user_payload = Jason.encode!(%{data: "test"}) + + msg = %UserBroadcast{ + topic: "some:other:topic", + user_event: "event123", + user_payload: user_payload, + user_payload_encoding: :json, + metadata: %{"id" => "123", "replayed" => true} + } + + log = + capture_log(fn -> + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + end) + + assert log =~ "Received message on realtime:topic with payload: #{inspect(msg, pretty: true)}" + + # Receive 2 messages using V1 + assert_receive {:socket_push, :text, message_v1} + assert_receive {:socket_push, :text, ^message_v1} + + assert Jason.decode!(message_v1) == %{ + "event" => "broadcast", + "payload" => %{ + "event" => "event123", + "meta" => %{"id" => "123", "replayed" => true}, + "payload" => %{"data" => "test"}, + "type" => "broadcast" + }, + "ref" => nil, + "topic" => "realtime:topic" + } + + # Receive 2 messages using V2 + assert_receive {:socket_push, :binary, message_v2} + assert_receive {:socket_push, :binary, ^message_v2} + + encoded_metadata = Jason.encode!(%{"id" => "123", "replayed" => true}) + metadata_size = byte_size(encoded_metadata) + + # binary payload structure + assert message_v2 == + << + # user broadcast = 4 + 4::size(8), + # topic_size + 14, + # user_event_size + 8, + # metadata_size + metadata_size, + # json encoding + 1::size(8), + "realtime:topic", + "event123" + >> <> encoded_metadata <> user_payload + + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + 
assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + + refute_receive _any + end + + test "dispatches binary UserBroadcast to V1 & V2 Serializers" do + parent = self() + + subscriber_pid = + spawn(fn -> + loop = fn loop -> + receive do + msg -> + send(parent, {:subscriber, msg}) + loop.(loop) + end + end + + loop.(loop) + end) + + from_pid = :erlang.list_to_pid(~c'<0.2.1>') + + subscribers = [ + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V1.JSONSerializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}}, + {subscriber_pid, {:rc_fastlane, self(), V2Serializer, "realtime:topic", :info, "tenant123", MapSet.new()}} + ] + + user_payload = <<123, 456, 789>> + + msg = %UserBroadcast{ + topic: "some:other:topic", + user_event: "event123", + user_payload: user_payload, + user_payload_encoding: :binary, + metadata: %{"id" => "123", "replayed" => true} + } + + log = + capture_log(fn -> + assert MessageDispatcher.dispatch(subscribers, from_pid, msg) == :ok + end) + + assert log =~ "Received message on realtime:topic with payload: #{inspect(msg, pretty: true)}" + assert log =~ "User payload encoding is not JSON" + + # Only prints once + assert String.split(log, "User payload encoding is not JSON") |> length() == 2 + + # No V1 message received as binary payloads are not supported + refute_receive {:socket_push, :text, _message_v1} + + # Receive 2 messages using V2 + assert_receive {:socket_push, :binary, message_v2} + assert_receive {:socket_push, :binary, ^message_v2} + + encoded_metadata = Jason.encode!(%{"id" => "123", "replayed" => true}) + metadata_size = byte_size(encoded_metadata) + + # binary payload structure + assert message_v2 == + << + # user broadcast = 4 + 4::size(8), + # topic_size + 14, + 
# user_event_size + 8, + # metadata_size + metadata_size, + # binary encoding + 0::size(8), + "realtime:topic", + "event123" + >> <> encoded_metadata <> user_payload + + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + assert_receive {:subscriber, :update_rate_counter} + + refute_receive _any + end end end diff --git a/test/realtime_web/channels/realtime_channel/presence_handler_test.exs b/test/realtime_web/channels/realtime_channel/presence_handler_test.exs index e5ecd32ad..1ef635838 100644 --- a/test/realtime_web/channels/realtime_channel/presence_handler_test.exs +++ b/test/realtime_web/channels/realtime_channel/presence_handler_test.exs @@ -99,26 +99,42 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do end end - describe "handle/2" do + describe "handle/3" do + setup %{tenant: tenant} do + on_exit(fn -> :telemetry.detach(__MODULE__) end) + + :telemetry.attach( + __MODULE__, + [:realtime, :tenants, :payload, :size], + &__MODULE__.handle_telemetry/4, + %{pid: self(), tenant: tenant} + ) + end + test "with true policy and is private, user can track their presence and changes", %{ tenant: tenant, topic: topic, db_conn: db_conn } do + external_id = tenant.external_id key = random_string() policies = %Policies{presence: %PresencePolicies{read: true, write: true}} socket = socket_fixture(tenant, topic, key, policies: policies) - PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) + PresenceHandler.handle(%{"event" => "track", "payload" => %{"A" => "b", "c" => "b"}}, db_conn, socket) topic = socket.assigns.tenant_topic assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} assert Map.has_key?(joins, key) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 30}, + %{tenant: ^external_id, message_type: :presence}} end test "when tracking already existing user, 
metadata updated", %{tenant: tenant, topic: topic, db_conn: db_conn} do + external_id = tenant.external_id key = random_string() policies = %Policies{presence: %PresencePolicies{read: true, write: true}} socket = socket_fixture(tenant, topic, key, policies: policies) @@ -134,19 +150,87 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} assert Map.has_key?(joins, key) - refute_receive :_ + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 6}, + %{tenant: ^external_id, message_type: :presence}} + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 55}, + %{tenant: ^external_id, message_type: :presence}} + + refute_receive _ + end + + test "tracking the same payload does nothing", %{tenant: tenant, topic: topic, db_conn: db_conn} do + external_id = tenant.external_id + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = socket_fixture(tenant, topic, key, policies: policies) + + assert {:ok, socket} = PresenceHandler.handle(%{"event" => "track", "payload" => %{"a" => "b"}}, db_conn, socket) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 18}, + %{tenant: ^external_id, message_type: :presence}} + + topic = socket.assigns.tenant_topic + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} + assert Map.has_key?(joins, key) + + assert {:ok, _socket} = + PresenceHandler.handle(%{"event" => "track", "payload" => %{"a" => "b"}}, db_conn, socket) + + refute_receive _ + end + + test "tracking, untracking and then tracking the same payload emit events", context do + %{tenant: tenant, topic: topic, db_conn: db_conn} = context + external_id = tenant.external_id + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = 
socket_fixture(tenant, topic, key, policies: policies) + + assert {:ok, socket} = PresenceHandler.handle(%{"event" => "track", "payload" => %{"a" => "b"}}, db_conn, socket) + assert socket.assigns.presence_track_payload == %{"a" => "b"} + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 18}, + %{tenant: ^external_id, message_type: :presence}} + + topic = socket.assigns.tenant_topic + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} + assert %{^key => %{metas: [%{:phx_ref => _, "a" => "b"}]}} = joins + + assert {:ok, socket} = PresenceHandler.handle(%{"event" => "untrack"}, db_conn, socket) + assert socket.assigns.presence_track_payload == nil + + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: %{}, leaves: leaves}} + assert %{^key => %{metas: [%{:phx_ref => _, "a" => "b"}]}} = leaves + + assert {:ok, socket} = PresenceHandler.handle(%{"event" => "track", "payload" => %{"a" => "b"}}, db_conn, socket) + + assert socket.assigns.presence_track_payload == %{"a" => "b"} + + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} + assert %{^key => %{metas: [%{:phx_ref => _, "a" => "b"}]}} = joins + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 18}, + %{tenant: ^external_id, message_type: :presence}} + + refute_receive _ end test "with false policy and is public, user can track their presence and changes", %{tenant: tenant, topic: topic} do + external_id = tenant.external_id key = random_string() policies = %Policies{presence: %PresencePolicies{read: false, write: false}} socket = socket_fixture(tenant, topic, key, policies: policies, private?: false) - assert {:ok, _socket} = PresenceHandler.handle(%{"event" => "track"}, socket) + assert {:ok, _socket} = PresenceHandler.handle(%{"event" => "track"}, nil, socket) topic = socket.assigns.tenant_topic assert_receive 
%Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} assert Map.has_key?(joins, key) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 6}, + %{tenant: ^external_id, message_type: :presence}} end test "user can untrack when they want", %{tenant: tenant, topic: topic, db_conn: db_conn} do @@ -229,6 +313,7 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do assert {:ok, socket} = PresenceHandler.handle( %{"event" => "track", "payload" => %{"metadata" => random_string()}}, + nil, socket ) @@ -248,7 +333,7 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do assert log =~ "UnknownPresenceEvent" end - test "socket with presence enabled false will ignore presence events in public channel", %{ + test "socket with presence enabled false will ignore non-track presence events in public channel", %{ tenant: tenant, topic: topic } do @@ -256,12 +341,12 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do policies = %Policies{presence: %PresencePolicies{read: true, write: true}} socket = socket_fixture(tenant, topic, key, policies: policies, private?: false, enabled?: false) - assert {:ok, _socket} = PresenceHandler.handle(%{"event" => "track"}, socket) + assert {:ok, _socket} = PresenceHandler.handle(%{"event" => "untrack"}, nil, socket) topic = socket.assigns.tenant_topic refute_receive %Broadcast{topic: ^topic, event: "presence_diff"} end - test "socket with presence enabled false will ignore presence events in private channel", %{ + test "socket with presence enabled false will ignore non-track presence events in private channel", %{ tenant: tenant, topic: topic, db_conn: db_conn @@ -270,11 +355,80 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do policies = %Policies{presence: %PresencePolicies{read: true, write: true}} socket = socket_fixture(tenant, topic, key, policies: policies, private?: false, enabled?: false) - assert {:ok, _socket} = 
PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) + assert {:ok, _socket} = PresenceHandler.handle(%{"event" => "untrack"}, db_conn, socket) topic = socket.assigns.tenant_topic refute_receive %Broadcast{topic: ^topic, event: "presence_diff"} end + test "socket with presence disabled will enable presence on track message for public channel", %{ + tenant: tenant, + topic: topic + } do + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = socket_fixture(tenant, topic, key, policies: policies, private?: false, enabled?: false) + + refute socket.assigns.presence_enabled? + + assert {:ok, updated_socket} = PresenceHandler.handle(%{"event" => "track"}, nil, socket) + + assert updated_socket.assigns.presence_enabled? + topic = socket.assigns.tenant_topic + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} + assert Map.has_key?(joins, key) + end + + test "socket with presence disabled will enable presence on track message for private channel", %{ + tenant: tenant, + topic: topic, + db_conn: db_conn + } do + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = socket_fixture(tenant, topic, key, policies: policies, private?: true, enabled?: false) + + refute socket.assigns.presence_enabled? + + assert {:ok, updated_socket} = PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) + + assert updated_socket.assigns.presence_enabled? 
+ topic = socket.assigns.tenant_topic + assert_receive %Broadcast{topic: ^topic, event: "presence_diff", payload: %{joins: joins, leaves: %{}}} + assert Map.has_key?(joins, key) + end + + test "socket with presence disabled will not enable presence on untrack message", %{ + tenant: tenant, + topic: topic, + db_conn: db_conn + } do + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = socket_fixture(tenant, topic, key, policies: policies, enabled?: false) + + refute socket.assigns.presence_enabled? + + assert {:ok, updated_socket} = PresenceHandler.handle(%{"event" => "untrack"}, db_conn, socket) + + refute updated_socket.assigns.presence_enabled? + topic = socket.assigns.tenant_topic + refute_receive %Broadcast{topic: ^topic, event: "presence_diff"} + end + + test "socket with presence disabled will not enable presence on unknown event", %{ + tenant: tenant, + topic: topic, + db_conn: db_conn + } do + key = random_string() + policies = %Policies{presence: %PresencePolicies{read: true, write: true}} + socket = socket_fixture(tenant, topic, key, policies: policies, enabled?: false) + + refute socket.assigns.presence_enabled? 
+ + assert {:error, :unknown_presence_event} = PresenceHandler.handle(%{"event" => "unknown"}, db_conn, socket) + end + @tag policies: [:authenticated_read_broadcast_and_presence, :authenticated_write_broadcast_and_presence] test "rate limit is checked on private channel", %{tenant: tenant, topic: topic, db_conn: db_conn} do key = random_string() @@ -284,7 +438,8 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do log = capture_log(fn -> for _ <- 1..300, do: PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) - Process.sleep(1100) + + {:ok, _} = RateCounterHelper.tick!(Tenants.presence_events_per_second_rate(tenant)) assert {:error, :rate_limit_exceeded} = PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) end) @@ -299,13 +454,25 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do log = capture_log(fn -> for _ <- 1..300, do: PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) - Process.sleep(1100) + + {:ok, _} = RateCounterHelper.tick!(Tenants.presence_events_per_second_rate(tenant)) assert {:error, :rate_limit_exceeded} = PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) end) assert log =~ "PresenceRateLimitReached" end + + test "fails on high payload size", %{tenant: tenant, topic: topic, db_conn: db_conn} do + key = random_string() + socket = socket_fixture(tenant, topic, key, private?: false) + payload_size = tenant.max_payload_size_in_kb * 1000 + + payload = %{content: random_string(payload_size)} + + assert {:error, :payload_size_exceeded} = + PresenceHandler.handle(%{"event" => "track", "payload" => payload}, db_conn, socket) + end end describe "sync/1" do @@ -356,7 +523,8 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do log = capture_log(fn -> for _ <- 1..300, do: PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) - Process.sleep(1100) + + {:ok, _} = RateCounterHelper.tick!(Tenants.presence_events_per_second_rate(tenant)) assert {:error, :rate_limit_exceeded} = 
PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) end) @@ -372,7 +540,8 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do log = capture_log(fn -> for _ <- 1..300, do: PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) - Process.sleep(1100) + + {:ok, _} = RateCounterHelper.tick!(Tenants.presence_events_per_second_rate(tenant)) assert {:error, :rate_limit_exceeded} = PresenceHandler.handle(%{"event" => "track"}, db_conn, socket) end) @@ -384,7 +553,7 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do defp initiate_tenant(context) do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) {:ok, db_conn} = Connect.lookup_or_start_connection(tenant.external_id) assert Connect.ready?(tenant.external_id) @@ -447,4 +616,10 @@ defmodule RealtimeWeb.RealtimeChannel.PresenceHandlerTest do } } end + + def handle_telemetry(event, measures, metadata, %{pid: pid, tenant: tenant}) do + if metadata[:tenant] == tenant.external_id do + send(pid, {:telemetry, event, measures, metadata}) + end + end end diff --git a/test/realtime_web/channels/realtime_channel/tracker_test.exs b/test/realtime_web/channels/realtime_channel/tracker_test.exs index 2590b9597..7137256c1 100644 --- a/test/realtime_web/channels/realtime_channel/tracker_test.exs +++ b/test/realtime_web/channels/realtime_channel/tracker_test.exs @@ -1,5 +1,7 @@ defmodule RealtimeWeb.RealtimeChannel.TrackerTest do - use Realtime.DataCase + # It kills websockets when no channels are open + # It can affect other tests + use Realtime.DataCase, async: false alias RealtimeWeb.RealtimeChannel.Tracker setup do diff --git a/test/realtime_web/channels/realtime_channel_test.exs b/test/realtime_web/channels/realtime_channel_test.exs index 2dff83da3..c92e8779a 100644 
--- a/test/realtime_web/channels/realtime_channel_test.exs +++ b/test/realtime_web/channels/realtime_channel_test.exs @@ -1,12 +1,11 @@ defmodule RealtimeWeb.RealtimeChannelTest do - # Can't run async true because under the hood Cachex is used and it doesn't see Ecto Sandbox - use RealtimeWeb.ChannelCase, async: false + use RealtimeWeb.ChannelCase, async: true use Mimic import ExUnit.CaptureLog - alias Phoenix.Socket alias Phoenix.Channel.Server + alias Phoenix.Socket alias Realtime.Tenants.Authorization alias Realtime.Tenants.Connect @@ -23,39 +22,540 @@ defmodule RealtimeWeb.RealtimeChannelTest do setup do tenant = Containers.checkout_tenant(run_migrations: true) + Realtime.Tenants.Cache.update_cache(tenant) {:ok, tenant: tenant} end setup :rls_context - describe "presence" do - test "events are counted", %{tenant: tenant} do + describe "process flags" do + test "max heap size is set for both transport and channel processes", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + assert Process.info(socket.transport_pid, :max_heap_size) == + {:max_heap_size, %{error_logger: true, include_shared_binaries: false, kill: true, size: 6_250_000}} + + assert {:ok, _, socket} = subscribe_and_join(socket, "realtime:test", %{}) + + assert Process.info(socket.channel_pid, :max_heap_size) == + {:max_heap_size, %{error_logger: true, include_shared_binaries: false, kill: true, size: 6_250_000}} + end + + # We don't test the socket because on unit tests Phoenix is not setting the fullsweep_after config + test "fullsweep_after is set on channel process", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + assert {:ok, _, socket} = subscribe_and_join(socket, "realtime:test", %{}) + + assert Process.info(socket.channel_pid, :fullsweep_after) == {:fullsweep_after, 20} + end + end + + describe 
"postgres changes" do + test "subscribes to inserts", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [%{"event" => "INSERT", "schema" => "public", "table" => "test"}] + } + + assert {:ok, reply, _socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{postgres_changes: [%{:id => sub_id, "event" => "INSERT", "schema" => "public", "table" => "test"}]} = + reply + + assert_push "system", + %{message: "Subscribed to PostgreSQL", status: "ok", extension: "postgres_changes", channel: "test"}, + 5000 + + {:ok, conn} = Connect.lookup_or_start_connection(tenant.external_id) + %{rows: [[id]]} = Postgrex.query!(conn, "insert into test (details) values ('test') returning id", []) + + assert_push "postgres_changes", %{data: data, ids: [^sub_id]}, 500 + + # we encode and decode because the data is a Jason.Fragment + assert %{ + "table" => "test", + "type" => "INSERT", + "record" => %{"details" => "test", "id" => ^id}, + "columns" => [%{"name" => "id", "type" => "int4"}, %{"name" => "details", "type" => "text"}], + "errors" => nil, + "schema" => "public", + "commit_timestamp" => _ + } = Jason.encode!(data) |> Jason.decode!() + + refute_receive %Socket.Message{} + refute_receive %Socket.Reply{} + end + + test "multiple subscriptions", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [ + %{"event" => "INSERT", "schema" => "public", "table" => "test"}, + %{"event" => "DELETE", "schema" => "public", "table" => "test"} + ] + } + + assert {:ok, reply, _socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{ + postgres_changes: [ + %{:id => sub_id, "event" => "INSERT", 
"schema" => "public", "table" => "test"}, + %{ + :id => 4_845_530, + "event" => "DELETE", + "schema" => "public", + "table" => "test" + } + ] + } = + reply + + assert_push "system", + %{message: "Subscribed to PostgreSQL", status: "ok", extension: "postgres_changes", channel: "test"}, + 5000 + + {:ok, conn} = Connect.lookup_or_start_connection(tenant.external_id) + %{rows: [[id]]} = Postgrex.query!(conn, "insert into test (details) values ('test') returning id", []) + + assert_push "postgres_changes", %{data: data, ids: [4_845_530, ^sub_id]}, 500 + + # we encode and decode because the data is a Jason.Fragment + assert %{ + "table" => "test", + "type" => "INSERT", + "record" => %{"details" => "test", "id" => ^id}, + "columns" => [%{"name" => "id", "type" => "int4"}, %{"name" => "details", "type" => "text"}], + "errors" => nil, + "schema" => "public", + "commit_timestamp" => _ + } = Jason.encode!(data) |> Jason.decode!() + + refute_receive %Socket.Message{} + refute_receive %Socket.Reply{} + end + + test "malformed subscription params", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [%{"event" => "*", "schema" => "public", "table" => "test", "filter" => "wrong"}] + } + + assert {:ok, reply, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{postgres_changes: [%{"event" => "*", "schema" => "public", "table" => "test"}]} = reply + + assert_push "system", + %{ + message: "Error parsing `filter` params: [\"wrong\"]", + status: "error", + extension: "postgres_changes", + channel: "test" + }, + 3000 + + socket = Server.socket(socket.channel_pid) + + # It won't re-subscribe + assert socket.assigns.pg_sub_ref == nil + end + + test "invalid subscription table does not exist", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = 
socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [%{"event" => "*", "schema" => "public", "table" => "doesnotexist"}] + } + + assert {:ok, reply, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{postgres_changes: [%{"event" => "*", "schema" => "public", "table" => "doesnotexist"}]} = reply + + assert_push "system", + %{ + message: + "Unable to subscribe to changes with given parameters. Please check Realtime is enabled for the given connect parameters: [schema: public, table: doesnotexist, filters: []]", + status: "error", + extension: "postgres_changes", + channel: "test" + }, + 5000 + + socket = Server.socket(socket.channel_pid) + + # It won't re-subscribe + assert socket.assigns.pg_sub_ref == nil + end + + test "invalid subscription column does not exist", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [ + %{"event" => "*", "schema" => "public", "table" => "test", "filter" => "notacolumn=eq.123"} + ] + } + + assert {:ok, reply, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{postgres_changes: [%{"event" => "*", "schema" => "public", "table" => "test"}]} = reply + + assert_push "system", + %{ + message: + "Unable to subscribe to changes with given parameters. An exception happened so please check your connect parameters: [schema: public, table: test, filters: [{\"notacolumn\", \"eq\", \"123\"}]]. 
Exception: ERROR P0001 (raise_exception) invalid column for filter notacolumn", + status: "error", + extension: "postgres_changes", + channel: "test" + }, + 5000 + + socket = Server.socket(socket.channel_pid) + + # It won't re-subscribe + assert socket.assigns.pg_sub_ref == nil + end + + test "connection error", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "postgres_changes" => [%{"event" => "*", "schema" => "public", "table" => "test"}] + } + + conn = spawn(fn -> :ok end) + # Let's set the subscription manager conn to be a pid that is no more + + assert {:ok, reply, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + assert %{postgres_changes: [%{"event" => "*", "schema" => "public", "table" => "test"}]} = reply + + assert_push "system", + %{ + message: "Subscribed to PostgreSQL", + status: "ok", + extension: "postgres_changes", + channel: "test" + }, + 5000 + + {:ok, manager_pid, _conn} = Extensions.PostgresCdcRls.get_manager_conn(tenant.external_id) + Extensions.PostgresCdcRls.update_meta(tenant.external_id, manager_pid, conn) + + assert {:ok, _reply, socket} = subscribe_and_join(socket, "realtime:test_fail", %{"config" => config}) + + assert_push "system", + %{message: message, status: "error", extension: "postgres_changes", channel: "test_fail"}, + 5000 + + assert message =~ "{:error, \"Too many database timeouts\"}" + socket = Server.socket(socket.channel_pid) + + # It will try again in the future + assert socket.assigns.pg_sub_ref != nil + end + end + + describe "broadcast" do + @describetag policies: [:authenticated_all_topic_read] + + test "broadcast map payload", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + 
"broadcast" => %{"self" => true} + } + + assert {:ok, _, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + push(socket, "broadcast", %{"event" => "my_event", "payload" => %{"hello" => "world"}}) + + assert_receive %Phoenix.Socket.Message{ + topic: "realtime:test", + event: "broadcast", + payload: %{"event" => "my_event", "payload" => %{"hello" => "world"}} + } + end + + test "broadcast non-map payload", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "broadcast" => %{"self" => true} + } + + assert {:ok, _, socket} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + push(socket, "broadcast", "not a map") + + assert_receive %Phoenix.Socket.Message{ + topic: "realtime:test", + event: "broadcast", + payload: "not a map" + } + end + + test "wrong replay params", %{tenant: tenant} do jwt = Generators.generate_jwt_token(tenant) {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) - assert {:ok, _, %Socket{} = socket} = subscribe_and_join(socket, "realtime:test", %{}) + config = %{ + "private" => true, + "broadcast" => %{ + "replay" => %{"limit" => "not a number", "since" => :erlang.system_time(:millisecond) - 5 * 60000} + } + } - presence_diff = %Socket.Broadcast{event: "presence_diff", payload: %{joins: %{}, leaves: %{}}} - send(socket.channel_pid, presence_diff) + assert {:error, %{reason: "UnableToReplayMessages: Replay params are not valid"}} = + subscribe_and_join(socket, "realtime:test", %{"config" => config}) - assert_receive %Socket.Message{topic: "realtime:test", event: "presence_state", payload: %{}} + config = %{ + "private" => true, + "broadcast" => %{ + "replay" => %{"limit" => 1, "since" => "not a number"} + } + } + + assert {:error, %{reason: "UnableToReplayMessages: Replay params are not valid"}} = + 
subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + config = %{ + "private" => true, + "broadcast" => %{ + "replay" => %{} + } + } + + assert {:error, %{reason: "UnableToReplayMessages: Replay params are not valid"}} = + subscribe_and_join(socket, "realtime:test", %{"config" => config}) + end + + test "failure to replay", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) + + config = %{ + "private" => true, + "broadcast" => %{ + "replay" => %{"limit" => 12, "since" => :erlang.system_time(:millisecond) - 5 * 60000} + } + } + + Authorization + |> expect(:get_read_authorizations, fn _, _, _ -> + {:ok, + %Authorization.Policies{ + broadcast: %Authorization.Policies.BroadcastPolicies{read: true, write: nil} + }} + end) + + # Broken database connection + conn = spawn(fn -> :ok end) + Connect.lookup_or_start_connection(tenant.external_id) + {:ok, _} = :syn.update_registry(Connect, tenant.external_id, fn _pid, meta -> %{meta | conn: conn} end) + + assert {:error, %{reason: "UnableToReplayMessages: Realtime was unable to replay messages"}} = + subscribe_and_join(socket, "realtime:test", %{"config" => config}) + end + + test "replay messages on public topic not allowed", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) + + config = %{ + "presence" => %{"enabled" => false}, + "broadcast" => %{"replay" => %{"limit" => 2, "since" => :erlang.system_time(:millisecond) - 5 * 60000}} + } + + assert { + :error, + %{reason: "UnableToReplayMessages: Replay is not allowed for public channels"} + } = subscribe_and_join(socket, "realtime:test", %{"config" => config}) + + refute_receive %Socket.Message{} + refute_receive %Socket.Reply{} + end + + @tag policies: [:authenticated_all_topic_read] + test "replay messages on private 
topic", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) + + # Old message + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :day), + "event" => "old", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "old"} + }) + + %{id: message1_id} = + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute), + "event" => "first", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "first"} + }) + + %{id: message2_id} = + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-2, :minute), + "event" => "second", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "second"} + }) + + # This one should not be received because of the limit + message_fixture(tenant, %{ + "private" => true, + "inserted_at" => NaiveDateTime.utc_now() |> NaiveDateTime.add(-3, :minute), + "event" => "third", + "extension" => "broadcast", + "topic" => "test", + "payload" => %{"value" => "third"} + }) + + config = %{ + "private" => true, + "presence" => %{"enabled" => false}, + "broadcast" => %{"replay" => %{"limit" => 2, "since" => :erlang.system_time(:millisecond) - 5 * 60000}} + } + + assert {:ok, _, %Socket{}} = subscribe_and_join(socket, "realtime:test", %{"config" => config}) assert_receive %Socket.Message{ topic: "realtime:test", - event: "presence_diff", - payload: %{joins: %{}, leaves: %{}} + event: "broadcast", + payload: %{ + "event" => "first", + "meta" => %{"id" => ^message1_id, "replayed" => true}, + "payload" => %{"value" => "first"}, + "type" => "broadcast" + } } - tenant_id = tenant.external_id + assert_receive %Socket.Message{ + topic: "realtime:test", + event: "broadcast", + payload: %{ + "event" 
=> "second", + "meta" => %{"id" => ^message2_id, "replayed" => true}, + "payload" => %{"value" => "second"}, + "type" => "broadcast" + } + } + + refute_receive %Socket.Message{} + end + end + + describe "presence" do + test "presence state event is counted", %{tenant: tenant} do + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) - # Wait for RateCounter to tick - Process.sleep(1100) + assert {:ok, _, %Socket{} = socket} = subscribe_and_join(socket, "realtime:test", %{}) + + assert_receive %Socket.Message{topic: "realtime:test", event: "presence_state", payload: %{}} + + tenant_id = tenant.external_id assert {:ok, %RateCounter{id: {:channel, :presence_events, ^tenant_id}, bucket: bucket}} = - RateCounter.get(socket.assigns.presence_rate_counter) + RateCounterHelper.tick!(socket.assigns.presence_rate_counter) + + # presence_state + assert Enum.sum(bucket) == 1 + end + + test "presence track closes on high payload size", %{tenant: tenant} do + topic = "realtime:test" + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) + + assert {:ok, _, %Socket{} = socket} = subscribe_and_join(socket, topic, %{}) + + assert_receive %Phoenix.Socket.Message{topic: "realtime:test", event: "presence_state"}, 500 + + payload = %{ + type: "presence", + event: "TRACK", + payload: %{name: "realtime_presence_96", t: 1814.7000000029802, content: String.duplicate("a", 3_500_000)} + } + + push(socket, "presence", payload) + + assert_receive %Phoenix.Socket.Message{ + event: "system", + payload: %{ + extension: "system", + message: "Track message size exceeded", + status: "error" + }, + topic: ^topic + }, + 500 + end + + test "presence track with same payload does nothing", %{tenant: tenant} do + topic = "realtime:test" + jwt = Generators.generate_jwt_token(tenant) + {:ok, %Socket{} = socket} = 
connect(UserSocket, %{"log_level" => "warning"}, conn_opts(tenant, jwt)) + + assert {:ok, _, %Socket{} = socket} = + subscribe_and_join(socket, topic, %{config: %{presence: %{enabled: true, key: "my_key"}}}) - # presence_state + presence_diff - assert 2 in bucket + assert_receive %Phoenix.Socket.Message{topic: "realtime:test", event: "presence_state"}, 500 + + payload = %{type: "presence", event: "TRACK", payload: %{"hello" => "world"}} + + push(socket, "presence", payload) + + assert_receive %Socket.Reply{payload: %{}, topic: "realtime:test", status: :ok}, 500 + + assert_receive %Socket.Message{ + payload: %{ + joins: %{"my_key" => %{metas: [%{:phx_ref => _, "hello" => "world"}]}}, + leaves: %{} + }, + topic: "realtime:test", + event: "presence_diff" + }, + 500 + + push(socket, "presence", payload) + + assert_receive %Socket.Reply{payload: %{}, topic: "realtime:test", status: :ok}, 500 + # no presence_diff this time + + refute_receive %Socket.Message{} + refute_receive %Socket.Reply{} end end @@ -762,7 +1262,10 @@ defmodule RealtimeWeb.RealtimeChannelTest do put_in(extension, ["settings", "db_port"], db_port) ] - Realtime.Api.update_tenant(tenant, %{extensions: extensions}) + with {:ok, tenant} <- Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{extensions: extensions}) do + Realtime.Tenants.Cache.update_cache(tenant) + {:ok, tenant} + end end defp assert_process_down(pid) do diff --git a/test/realtime_web/channels/tenant_rate_limiters_test.exs b/test/realtime_web/channels/tenant_rate_limiters_test.exs new file mode 100644 index 000000000..05d56ec82 --- /dev/null +++ b/test/realtime_web/channels/tenant_rate_limiters_test.exs @@ -0,0 +1,31 @@ +defmodule RealtimeWeb.TenantRateLimitersTest do + use Realtime.DataCase, async: true + + use Mimic + alias RealtimeWeb.TenantRateLimiters + alias Realtime.Api.Tenant + + setup do + tenant = %Tenant{external_id: random_string(), max_concurrent_users: 1, max_joins_per_second: 1} + + %{tenant: tenant} + end + + 
describe "check_tenant/1" do + test "rate is not exceeded", %{tenant: tenant} do + assert TenantRateLimiters.check_tenant(tenant) == :ok + end + + test "max concurrent users is exceeded", %{tenant: tenant} do + Realtime.UsersCounter.add(self(), tenant.external_id) + + assert TenantRateLimiters.check_tenant(tenant) == {:error, :too_many_connections} + end + + test "max joins is exceeded", %{tenant: tenant} do + expect(Realtime.RateCounter, :get, fn _ -> {:ok, %{limit: %{triggered: true}}} end) + + assert TenantRateLimiters.check_tenant(tenant) == {:error, :too_many_joins} + end + end +end diff --git a/test/realtime_web/controllers/broadcast_controller_test.exs b/test/realtime_web/controllers/broadcast_controller_test.exs index 9c38d58bd..73ab4148e 100644 --- a/test/realtime_web/controllers/broadcast_controller_test.exs +++ b/test/realtime_web/controllers/broadcast_controller_test.exs @@ -18,7 +18,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do setup %{conn: conn} do tenant = Containers.checkout_tenant(run_migrations: true) # Warm cache to avoid Cachex and Ecto.Sandbox ownership issues - Cachex.put!(Realtime.Tenants.Cache, {{:get_tenant_by_external_id, 1}, [tenant.external_id]}, {:cached, tenant}) + Realtime.Tenants.Cache.update_cache(tenant) conn = generate_conn(conn, tenant) @@ -141,16 +141,38 @@ defmodule RealtimeWeb.BroadcastControllerTest do assert conn.status == 422 - # Wait for counters to increment. 
RateCounter tick is 1 second - Process.sleep(2000) - {:ok, rate_counter} = RateCounter.get(Tenants.requests_per_second_rate(tenant)) + {:ok, rate_counter} = RateCounterHelper.tick!(Tenants.requests_per_second_rate(tenant)) assert rate_counter.avg != 0.0 - {:ok, rate_counter} = RateCounter.get(Tenants.events_per_second_rate(tenant)) + {:ok, rate_counter} = RateCounterHelper.tick!(Tenants.events_per_second_rate(tenant)) assert rate_counter.avg == 0.0 refute_receive {:socket_push, _, _} end + + test "returns 422 when batch of messages includes a message that exceeds the tenant payload size", %{ + conn: conn, + tenant: tenant + } do + sub_topic_1 = "sub_topic_1" + sub_topic_2 = "sub_topic_2" + + payload_1 = %{"data" => "data"} + payload_2 = %{"data" => random_string(tenant.max_payload_size_in_kb * 1000 + 100)} + event_1 = "event_1" + event_2 = "event_2" + + conn = + post(conn, Routes.broadcast_path(conn, :broadcast), %{ + "messages" => [ + %{"topic" => sub_topic_1, "payload" => payload_1, "event" => event_1}, + %{"topic" => sub_topic_1, "payload" => payload_1, "event" => event_1}, + %{"topic" => sub_topic_2, "payload" => payload_2, "event" => event_2} + ] + }) + + assert conn.status == 422 + end end describe "too many requests" do @@ -272,7 +294,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } do request_events_key = Tenants.requests_per_second_key(tenant) broadcast_events_key = Tenants.events_per_second_key(tenant) - expect(TenantBroadcaster, :pubsub_broadcast, 5, fn _, _, _, _ -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 5, fn _, _, _, _, _ -> :ok end) messages_to_send = Stream.repeatedly(fn -> generate_message_with_policies(db_conn, tenant) end) @@ -294,7 +316,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do conn = post(conn, Routes.broadcast_path(conn, :broadcast), %{"messages" => messages}) - broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/4) + broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/5) 
Enum.each(messages_to_send, fn %{topic: topic} -> broadcast_topic = Tenants.tenant_topic(tenant, topic, false) @@ -310,7 +332,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } assert Enum.any?(broadcast_calls, fn - [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher] -> true + [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher, :broadcast] -> true _ -> false end) end) @@ -326,7 +348,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } do request_events_key = Tenants.requests_per_second_key(tenant) broadcast_events_key = Tenants.events_per_second_key(tenant) - expect(TenantBroadcaster, :pubsub_broadcast, 6, fn _, _, _, _ -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 6, fn _, _, _, _, _ -> :ok end) channels = Stream.repeatedly(fn -> generate_message_with_policies(db_conn, tenant) end) @@ -358,7 +380,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do conn = post(conn, Routes.broadcast_path(conn, :broadcast), %{"messages" => messages}) - broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/4) + broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/5) Enum.each(channels, fn %{topic: topic} -> broadcast_topic = Tenants.tenant_topic(tenant, topic, false) @@ -374,7 +396,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } assert Enum.count(broadcast_calls, fn - [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher] -> true + [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher, :broadcast] -> true _ -> false end) == 1 end) @@ -393,7 +415,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do open_channel_topic = Tenants.tenant_topic(tenant, "open_channel", true) assert Enum.count(broadcast_calls, fn - [_, ^open_channel_topic, ^message, RealtimeChannel.MessageDispatcher] -> true + [_, ^open_channel_topic, ^message, RealtimeChannel.MessageDispatcher, :broadcast] -> true _ -> false end) == 1 @@ -408,7 +430,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } do request_events_key 
= Tenants.requests_per_second_key(tenant) broadcast_events_key = Tenants.events_per_second_key(tenant) - expect(TenantBroadcaster, :pubsub_broadcast, 5, fn _, _, _, _ -> :ok end) + expect(TenantBroadcaster, :pubsub_broadcast, 5, fn _, _, _, _, _ -> :ok end) messages_to_send = Stream.repeatedly(fn -> generate_message_with_policies(db_conn, tenant) end) @@ -428,11 +450,12 @@ defmodule RealtimeWeb.BroadcastControllerTest do GenCounter |> expect(:add, fn ^request_events_key -> :ok end) - |> expect(:add, length(messages_to_send), fn ^broadcast_events_key -> :ok end) + # remove the one message that won't be broadcasted for this user + |> expect(:add, length(messages) - 1, fn ^broadcast_events_key -> :ok end) conn = post(conn, Routes.broadcast_path(conn, :broadcast), %{"messages" => messages}) - broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/4) + broadcast_calls = calls(&TenantBroadcaster.pubsub_broadcast/5) Enum.each(messages_to_send, fn %{topic: topic} -> broadcast_topic = Tenants.tenant_topic(tenant, topic, false) @@ -448,7 +471,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do } assert Enum.count(broadcast_calls, fn - [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher] -> true + [_, ^broadcast_topic, ^message, RealtimeChannel.MessageDispatcher, :broadcast] -> true _ -> false end) == 1 end) @@ -461,7 +484,7 @@ defmodule RealtimeWeb.BroadcastControllerTest do @tag role: "anon" test "user without permission won't broadcast", %{conn: conn, db_conn: db_conn, tenant: tenant} do request_events_key = Tenants.requests_per_second_key(tenant) - reject(&TenantBroadcaster.pubsub_broadcast/4) + reject(&TenantBroadcaster.pubsub_broadcast/5) messages = Stream.repeatedly(fn -> generate_message_with_policies(db_conn, tenant) end) @@ -482,7 +505,6 @@ defmodule RealtimeWeb.BroadcastControllerTest do GenCounter |> expect(:add, fn ^request_events_key -> 1 end) - |> reject(:add, 1) conn = post(conn, Routes.broadcast_path(conn, :broadcast), %{"messages" => 
messages}) diff --git a/test/realtime_web/controllers/metrics_controller_test.exs b/test/realtime_web/controllers/metrics_controller_test.exs index f16edc83f..52453271c 100644 --- a/test/realtime_web/controllers/metrics_controller_test.exs +++ b/test/realtime_web/controllers/metrics_controller_test.exs @@ -2,11 +2,23 @@ defmodule RealtimeWeb.MetricsControllerTest do # Usage of Clustered # Also changing Application env use RealtimeWeb.ConnCase, async: false + alias Realtime.GenRpc import ExUnit.CaptureLog + use Mimic setup_all do - {:ok, _} = Clustered.start(nil, extra_config: [{:realtime, :region, "ap-southeast-2"}]) + metrics_tags = %{ + region: "ap-southeast-2", + host: "anothernode@something.com", + id: "someid" + } + + {:ok, _} = + Clustered.start(nil, + extra_config: [{:realtime, :region, "ap-southeast-2"}, {:realtime, :metrics_tags, metrics_tags}] + ) + :ok end @@ -30,14 +42,18 @@ defmodule RealtimeWeb.MetricsControllerTest do assert response =~ "# HELP beam_system_schedulers_online_info The number of scheduler threads that are online." 
- assert response =~ "region=\"ap-southeast-2" - assert response =~ "region=\"us-east-1" + assert response =~ "region=\"ap-southeast-2\"" + assert response =~ "region=\"us-east-1\"" end test "returns 200 and log on timeout", %{conn: conn} do - current_value = Application.get_env(:realtime, :metrics_rpc_timeout) - on_exit(fn -> Application.put_env(:realtime, :metrics_rpc_timeout, current_value) end) - Application.put_env(:realtime, :metrics_rpc_timeout, 0) + Mimic.stub(GenRpc, :call, fn node, mod, func, args, opts -> + if node != node() do + {:error, :rpc_error, :timeout} + else + call_original(GenRpc, :call, [node, mod, func, args, opts]) + end + end) log = capture_log(fn -> @@ -74,4 +90,64 @@ defmodule RealtimeWeb.MetricsControllerTest do |> response(403) end end + + describe "GET /metrics/:region" do + setup %{conn: conn} do + # The metrics pipeline requires authentication + jwt_secret = Application.fetch_env!(:realtime, :metrics_jwt_secret) + token = generate_jwt_token(jwt_secret, %{}) + authenticated_conn = put_req_header(conn, "authorization", "Bearer #{token}") + + {:ok, conn: authenticated_conn} + end + + test "returns 200", %{conn: conn} do + assert response = + conn + |> get(~p"/metrics/ap-southeast-2") + |> text_response(200) + + # Check prometheus like metrics + assert response =~ + "# HELP beam_system_schedulers_online_info The number of scheduler threads that are online." 
+ + assert response =~ "region=\"ap-southeast-2\"" + refute response =~ "region=\"us-east-1\"" + end + + test "returns 200 and log on timeout", %{conn: conn} do + Mimic.stub(GenRpc, :call, fn _node, _mod, _func, _args, _opts -> + {:error, :rpc_error, :timeout} + end) + + log = + capture_log(fn -> + assert response = + conn + |> get(~p"/metrics/ap-southeast-2") + |> text_response(200) + + assert response == "" + end) + + assert log =~ "Cannot fetch metrics from the node" + end + + test "returns 403 when authorization header is missing", %{conn: conn} do + assert conn + |> delete_req_header("authorization") + |> get(~p"/metrics/ap-southeast-2") + |> response(403) + end + + test "returns 403 when authorization header is wrong", %{conn: conn} do + token = generate_jwt_token("bad_secret", %{}) + + assert _ = + conn + |> put_req_header("authorization", "Bearer #{token}") + |> get(~p"/metrics/ap-southeast-2") + |> response(403) + end + end end diff --git a/test/realtime_web/controllers/tenant_controller_test.exs b/test/realtime_web/controllers/tenant_controller_test.exs index 3974e7e7b..95c7ab762 100644 --- a/test/realtime_web/controllers/tenant_controller_test.exs +++ b/test/realtime_web/controllers/tenant_controller_test.exs @@ -334,8 +334,9 @@ defmodule RealtimeWeb.TenantControllerTest do setup [:with_tenant] setup do + previous_region = Application.get_env(:realtime, :region) Application.put_env(:realtime, :region, "us-east-1") - on_exit(fn -> Application.put_env(:realtime, :region, nil) end) + on_exit(fn -> Application.put_env(:realtime, :region, previous_region) end) end test "health check when tenant does not exist", %{conn: conn} do @@ -418,7 +419,7 @@ defmodule RealtimeWeb.TenantControllerTest do conn = get(conn, ~p"/api/tenants/#{tenant.external_id}/health") data = json_response(conn, 200)["data"] - Process.sleep(2000) + Process.sleep(1000) assert {:ok, %{rows: []}} = Postgrex.query(db_conn, "SELECT * FROM realtime.messages", []) diff --git 
a/test/realtime_web/live/status_live/index_test.exs b/test/realtime_web/live/status_live/index_test.exs new file mode 100644 index 000000000..ae3af0ad0 --- /dev/null +++ b/test/realtime_web/live/status_live/index_test.exs @@ -0,0 +1,33 @@ +defmodule RealtimeWeb.StatusLive.IndexTest do + use RealtimeWeb.ConnCase + import Phoenix.LiveViewTest + + alias Realtime.Latency.Payload + alias Realtime.Nodes + alias RealtimeWeb.Endpoint + + describe "Status LiveView" do + test "renders status page", %{conn: conn} do + {:ok, _view, html} = live(conn, ~p"/status") + + assert html =~ "Realtime Status" + end + + test "receives broadcast from PubSub", %{conn: conn} do + {:ok, view, _html} = live(conn, ~p"/status") + + payload = %Payload{ + from_node: Nodes.short_node_id_from_name(:"pink@127.0.0.1"), + node: Nodes.short_node_id_from_name(:"orange@127.0.0.1"), + latency: "42ms", + timestamp: DateTime.utc_now() + } + + Endpoint.broadcast("admin:cluster", "ping", payload) + + html = render(view) + assert html =~ "42ms" + assert html =~ "pink@127.0.0.1_orange@127.0.0.1" + end + end +end diff --git a/test/realtime_web/plugs/rate_limiter_test.exs b/test/realtime_web/plugs/rate_limiter_test.exs index 78b22fc8f..1cca58346 100644 --- a/test/realtime_web/plugs/rate_limiter_test.exs +++ b/test/realtime_web/plugs/rate_limiter_test.exs @@ -47,9 +47,7 @@ defmodule RealtimeWeb.Plugs.RateLimiterTest do end test "serve a 200 when rate limit is set to 100", %{conn: conn} do - {:ok, _tenant} = - Api.get_tenant_by_external_id(@tenant["external_id"]) - |> Api.update_tenant(%{"max_events_per_second" => 100}) + {:ok, _tenant} = Api.update_tenant_by_external_id(@tenant["external_id"], %{"max_events_per_second" => 100}) conn = conn diff --git a/test/realtime_web/socket/v2_serializer_test.exs b/test/realtime_web/socket/v2_serializer_test.exs new file mode 100644 index 000000000..2d83e1ea1 --- /dev/null +++ b/test/realtime_web/socket/v2_serializer_test.exs @@ -0,0 +1,553 @@ +defmodule 
RealtimeWeb.Socket.V2SerializerTest do + use ExUnit.Case, async: true + + alias Phoenix.Socket.{Broadcast, Message, Reply} + alias RealtimeWeb.Socket.UserBroadcast + alias RealtimeWeb.Socket.V2Serializer + + @serializer V2Serializer + @v2_fastlane_json "[null,null,\"t\",\"e\",{\"m\":1}]" + @v2_msg_json "[null,null,\"t\",\"e\",{\"m\":1}]" + + @client_push << + # push + 0::size(8), + # join_ref_size + 2, + # ref_size + 3, + # topic_size + 5, + # event_size + 5, + "12", + "123", + "topic", + "event", + 101, + 102, + 103 + >> + + @client_binary_user_broadcast_push << + # user broadcast push + 3::size(8), + # join_ref_size + 2, + # ref_size + 3, + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 0, + # binary encoding + 0::size(8), + "12", + "123", + "topic", + "user_event", + 101, + 102, + 103 + >> + + @client_json_user_broadcast_push << + # user broadcast push + 3::size(8), + # join_ref_size + 2, + # ref_size + 3, + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 0, + # json encoding + 1::size(8), + "12", + "123", + "topic", + "user_event", + 123, + 34, + 97, + 34, + 58, + 34, + 98, + 34, + 125 + >> + + @client_binary_user_broadcast_push_with_metadata << + # user broadcast push + 3::size(8), + # join_ref_size + 2, + # ref_size + 3, + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 14, + # binary encoding + 0::size(8), + "12", + "123", + "topic", + "user_event", + ~s<{"store":true}>, + 101, + 102, + 103 + >> + + @reply << + # reply + 1::size(8), + # join_ref_size + 2, + # ref_size + 3, + # topic_size + 5, + # status_size + 2, + "12", + "123", + "topic", + "ok", + 101, + 102, + 103 + >> + + @broadcast << + # broadcast + 2::size(8), + # topic_size + 5, + # event_size + 5, + "topic", + "event", + 101, + 102, + 103 + >> + + @binary_user_broadcast << + # user broadcast + 4::size(8), + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 17, + # binary encoding + 0::size(8), + "topic", + "user_event", + # metadata + 
123, + 34, + 114, + 101, + 112, + 108, + 97, + 121, + 101, + 100, + 34, + 58, + 116, + 114, + 117, + 101, + 125, + # payload + 101, + 102, + 103 + >> + + @binary_user_broadcast_no_metadata << + # user broadcast + 4::size(8), + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 0, + # binary encoding + 0::size(8), + "topic", + "user_event", + # metadata + # payload + 101, + 102, + 103 + >> + + @json_user_broadcast << + # user broadcast + 4::size(8), + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 17, + # json encoding + 1::size(8), + "topic", + "user_event", + # metadata + 123, + 34, + 114, + 101, + 112, + 108, + 97, + 121, + 101, + 100, + 34, + 58, + 116, + 114, + 117, + 101, + 125, + # payload + 123, + 34, + 97, + 34, + 58, + 34, + 98, + 34, + 125 + >> + + @json_user_broadcast_no_metadata << + # broadcast + 4::size(8), + # topic_size + 5, + # user_event_size + 10, + # metadata_size + 0, + # json encoding + 1::size(8), + "topic", + "user_event", + # metadata + # payload + 123, + 34, + 97, + 34, + 58, + 34, + 98, + 34, + 125 + >> + + defp encode!(serializer, msg) do + case serializer.encode!(msg) do + {:socket_push, :text, encoded} -> + assert is_list(encoded) + IO.iodata_to_binary(encoded) + + {:socket_push, :binary, encoded} -> + assert is_binary(encoded) + encoded + end + end + + defp decode!(serializer, msg, opts), do: serializer.decode!(msg, opts) + + defp fastlane!(serializer, msg) do + case serializer.fastlane!(msg) do + {:socket_push, :text, encoded} -> + assert is_list(encoded) + IO.iodata_to_binary(encoded) + + {:socket_push, :binary, encoded} -> + assert is_binary(encoded) + encoded + end + end + + test "encode!/1 encodes `Phoenix.Socket.Message` as JSON" do + msg = %Message{topic: "t", event: "e", payload: %{m: 1}} + assert encode!(@serializer, msg) == @v2_msg_json + end + + test "encode!/1 raises when payload is not a map" do + msg = %Message{topic: "t", event: "e", payload: "invalid"} + assert_raise ArgumentError, fn -> 
encode!(@serializer, msg) end + end + + test "encode!/1 encodes `Phoenix.Socket.Reply` as JSON" do + msg = %Reply{topic: "t", payload: %{m: 1}} + encoded = encode!(@serializer, msg) + + assert Jason.decode!(encoded) == [ + nil, + nil, + "t", + "phx_reply", + %{"response" => %{"m" => 1}, "status" => nil} + ] + end + + test "decode!/2 decodes `Phoenix.Socket.Message` from JSON" do + assert %Message{topic: "t", event: "e", payload: %{"m" => 1}} == + decode!(@serializer, @v2_msg_json, opcode: :text) + end + + test "fastlane!/1 encodes a broadcast into a message as JSON" do + msg = %Broadcast{topic: "t", event: "e", payload: %{m: 1}} + assert fastlane!(@serializer, msg) == @v2_fastlane_json + end + + test "fastlane!/1 raises when payload is not a map" do + msg = %Broadcast{topic: "t", event: "e", payload: "invalid"} + assert_raise ArgumentError, fn -> fastlane!(@serializer, msg) end + end + + describe "binary encode" do + test "general pushed message" do + push = << + # push + 0::size(8), + # join_ref_size + 2, + # topic_size + 5, + # event_size + 5, + "12", + "topic", + "event", + 101, + 102, + 103 + >> + + assert encode!(@serializer, %Phoenix.Socket.Message{ + join_ref: "12", + ref: nil, + topic: "topic", + event: "event", + payload: {:binary, <<101, 102, 103>>} + }) == push + end + + test "encode with oversized headers" do + assert_raise ArgumentError, ~r/unable to convert topic to binary/, fn -> + encode!(@serializer, %Phoenix.Socket.Message{ + join_ref: "12", + ref: nil, + topic: String.duplicate("t", 256), + event: "event", + payload: {:binary, <<101, 102, 103>>} + }) + end + + assert_raise ArgumentError, ~r/unable to convert event to binary/, fn -> + encode!(@serializer, %Phoenix.Socket.Message{ + join_ref: "12", + ref: nil, + topic: "topic", + event: String.duplicate("e", 256), + payload: {:binary, <<101, 102, 103>>} + }) + end + + assert_raise ArgumentError, ~r/unable to convert join_ref to binary/, fn -> + encode!(@serializer, %Phoenix.Socket.Message{ + 
join_ref: String.duplicate("j", 256), + ref: nil, + topic: "topic", + event: "event", + payload: {:binary, <<101, 102, 103>>} + }) + end + end + + test "reply" do + assert encode!(@serializer, %Phoenix.Socket.Reply{ + join_ref: "12", + ref: "123", + topic: "topic", + status: :ok, + payload: {:binary, <<101, 102, 103>>} + }) == @reply + end + + test "reply with oversized headers" do + assert_raise ArgumentError, ~r/unable to convert ref to binary/, fn -> + encode!(@serializer, %Phoenix.Socket.Reply{ + join_ref: "12", + ref: String.duplicate("r", 256), + topic: "topic", + status: :ok, + payload: {:binary, <<101, 102, 103>>} + }) + end + end + + test "fastlane binary Broadcast" do + assert fastlane!(@serializer, %Broadcast{ + topic: "topic", + event: "event", + payload: {:binary, <<101, 102, 103>>} + }) == @broadcast + end + + test "fastlane binary UserBroadcast" do + assert fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: "user_event", + metadata: %{"replayed" => true}, + user_payload_encoding: :binary, + user_payload: <<101, 102, 103>> + }) == @binary_user_broadcast + end + + test "fastlane binary UserBroadcast no metadata" do + assert fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: "user_event", + metadata: nil, + user_payload_encoding: :binary, + user_payload: <<101, 102, 103>> + }) == @binary_user_broadcast_no_metadata + end + + test "fastlane json UserBroadcast" do + assert fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: "user_event", + metadata: %{"replayed" => true}, + user_payload_encoding: :json, + user_payload: "{\"a\":\"b\"}" + }) == @json_user_broadcast + end + + test "fastlane json UserBroadcast no metadata" do + assert fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: "user_event", + user_payload_encoding: :json, + user_payload: "{\"a\":\"b\"}" + }) == @json_user_broadcast_no_metadata + end + + test "fastlane with oversized headers" do + assert_raise ArgumentError, 
~r/unable to convert topic to binary/, fn -> + fastlane!(@serializer, %Broadcast{ + topic: String.duplicate("t", 256), + event: "event", + payload: {:binary, <<101, 102, 103>>} + }) + end + + assert_raise ArgumentError, ~r/unable to convert event to binary/, fn -> + fastlane!(@serializer, %Broadcast{ + topic: "topic", + event: String.duplicate("e", 256), + payload: {:binary, <<101, 102, 103>>} + }) + end + + assert_raise ArgumentError, ~r/unable to convert topic to binary/, fn -> + fastlane!(@serializer, %UserBroadcast{ + topic: String.duplicate("t", 256), + user_event: "user_event", + user_payload_encoding: :json, + user_payload: "{\"a\":\"b\"}" + }) + end + + assert_raise ArgumentError, ~r/unable to convert user_event to binary/, fn -> + fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: String.duplicate("e", 256), + user_payload_encoding: :json, + user_payload: "{\"a\":\"b\"}" + }) + end + + assert_raise ArgumentError, ~r/unable to convert metadata to binary/, fn -> + fastlane!(@serializer, %UserBroadcast{ + topic: "topic", + user_event: "user_event", + metadata: %{k: String.duplicate("e", 256)}, + user_payload_encoding: :json, + user_payload: "{\"a\":\"b\"}" + }) + end + end + end + + describe "binary decode" do + test "pushed message" do + assert decode!(@serializer, @client_push, opcode: :binary) == %Phoenix.Socket.Message{ + join_ref: "12", + ref: "123", + topic: "topic", + event: "event", + payload: {:binary, <<101, 102, 103>>} + } + end + + test "binary user pushed message with metadata" do + assert decode!(@serializer, @client_binary_user_broadcast_push_with_metadata, opcode: :binary) == + %Phoenix.Socket.Message{ + join_ref: "12", + ref: "123", + topic: "topic", + event: "broadcast", + payload: {"user_event", :binary, <<101, 102, 103>>, %{"store" => true}} + } + end + + test "binary user pushed message" do + assert decode!(@serializer, @client_binary_user_broadcast_push, opcode: :binary) == %Phoenix.Socket.Message{ + join_ref: "12", + 
ref: "123", + topic: "topic", + event: "broadcast", + payload: {"user_event", :binary, <<101, 102, 103>>, %{}} + } + end + + test "json binary user pushed message" do + assert decode!(@serializer, @client_json_user_broadcast_push, opcode: :binary) == %Phoenix.Socket.Message{ + join_ref: "12", + ref: "123", + topic: "topic", + event: "broadcast", + payload: {"user_event", :json, "{\"a\":\"b\"}", %{}} + } + end + end +end diff --git a/test/realtime_web/tenant_broadcaster_test.exs b/test/realtime_web/tenant_broadcaster_test.exs index d9afbf641..163a1236b 100644 --- a/test/realtime_web/tenant_broadcaster_test.exs +++ b/test/realtime_web/tenant_broadcaster_test.exs @@ -1,5 +1,5 @@ defmodule RealtimeWeb.TenantBroadcasterTest do - # Usage of Clustered + # Usage of Clustered and changing Application env use Realtime.DataCase, async: false alias Phoenix.Socket.Broadcast @@ -33,6 +33,7 @@ defmodule RealtimeWeb.TenantBroadcasterTest do end setup context do + tenant_id = random_string() Endpoint.subscribe(@topic) :erpc.call(context.node, Subscriber, :subscribe, [self(), @topic]) @@ -44,100 +45,208 @@ defmodule RealtimeWeb.TenantBroadcasterTest do __MODULE__, [:realtime, :tenants, :payload, :size], &__MODULE__.handle_telemetry/4, - pid: self() + %{pid: self(), tenant: tenant_id} ) - :ok + original = Application.fetch_env!(:realtime, :pubsub_adapter) + on_exit(fn -> Application.put_env(:realtime, :pubsub_adapter, original) end) + Application.put_env(:realtime, :pubsub_adapter, context.pubsub_adapter) + + {:ok, tenant_id: tenant_id} end - describe "pubsub_broadcast/4" do - test "pubsub_broadcast", %{node: node} do - message = %Broadcast{topic: @topic, event: "an event", payload: %{"a" => "b"}} - TenantBroadcaster.pubsub_broadcast("realtime-dev", @topic, message, Phoenix.PubSub) + for pubsub_adapter <- [:gen_rpc, :pg2] do + describe "pubsub_broadcast/5 #{pubsub_adapter}" do + @describetag pubsub_adapter: pubsub_adapter - assert_receive ^message + test "pubsub_broadcast", %{node: 
node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: %{"a" => "b"}} + TenantBroadcaster.pubsub_broadcast(tenant_id, @topic, message, Phoenix.PubSub, :broadcast) - # Remote node received the broadcast - assert_receive {:relay, ^node, ^message} + assert_receive ^message - assert_receive { - :telemetry, - [:realtime, :tenants, :payload, :size], - %{size: 114}, - %{tenant: "realtime-dev"} - } - end + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} + + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 114}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end + + test "pubsub_broadcast list payload", %{node: node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} + TenantBroadcaster.pubsub_broadcast(tenant_id, @topic, message, Phoenix.PubSub, :broadcast) + + assert_receive ^message - test "pubsub_broadcast list payload", %{node: node} do - message = %Broadcast{topic: @topic, event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} - TenantBroadcaster.pubsub_broadcast("realtime-dev", @topic, message, Phoenix.PubSub) + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} - assert_receive ^message + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 130}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end - # Remote node received the broadcast - assert_receive {:relay, ^node, ^message} + test "pubsub_broadcast string payload", %{node: node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: "some text payload"} + TenantBroadcaster.pubsub_broadcast(tenant_id, @topic, message, Phoenix.PubSub, :broadcast) - assert_receive { - :telemetry, - [:realtime, :tenants, :payload, :size], - %{size: 130}, - %{tenant: "realtime-dev"} - } + assert_receive ^message + + # Remote node received the 
broadcast + assert_receive {:relay, ^node, ^message} + + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 119}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end end - test "pubsub_broadcast string payload", %{node: node} do - message = %Broadcast{topic: @topic, event: "an event", payload: "some text payload"} - TenantBroadcaster.pubsub_broadcast("realtime-dev", @topic, message, Phoenix.PubSub) + describe "pubsub_broadcast_from/6 #{pubsub_adapter}" do + @describetag pubsub_adapter: pubsub_adapter + + test "pubsub_broadcast_from", %{node: node, tenant_id: tenant_id} do + parent = self() + + spawn_link(fn -> + Endpoint.subscribe(@topic) + send(parent, :ready) - assert_receive ^message + receive do + msg -> send(parent, {:other_process, msg}) + end + end) - # Remote node received the broadcast - assert_receive {:relay, ^node, ^message} + assert_receive :ready - assert_receive { - :telemetry, - [:realtime, :tenants, :payload, :size], - %{size: 119}, - %{tenant: "realtime-dev"} - } + message = %Broadcast{topic: @topic, event: "an event", payload: %{"a" => "b"}} + + TenantBroadcaster.pubsub_broadcast_from(tenant_id, self(), @topic, message, Phoenix.PubSub, :broadcast) + + assert_receive {:other_process, ^message} + + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} + + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 114}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + + # This process does not receive the message + refute_receive _any + end end - end - describe "pubsub_broadcast_from/5" do - test "pubsub_broadcast_from", %{node: node} do - parent = self() + describe "pubsub_direct_broadcast/6 #{pubsub_adapter}" do + @describetag pubsub_adapter: pubsub_adapter + + test "pubsub_direct_broadcast", %{node: node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: %{"a" => "b"}} + + 
TenantBroadcaster.pubsub_direct_broadcast(node(), tenant_id, @topic, message, Phoenix.PubSub, :broadcast) + TenantBroadcaster.pubsub_direct_broadcast(node, tenant_id, @topic, message, Phoenix.PubSub, :broadcast) - spawn_link(fn -> - Endpoint.subscribe(@topic) - send(parent, :ready) + assert_receive ^message - receive do - msg -> send(parent, {:other_process, msg}) - end - end) + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} - assert_receive :ready + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 114}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end - message = %Broadcast{topic: @topic, event: "an event", payload: %{"a" => "b"}} + test "pubsub_direct_broadcast list payload", %{node: node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: ["a", %{"b" => "c"}, 1, 23]} - TenantBroadcaster.pubsub_broadcast_from("realtime-dev", self(), @topic, message, Phoenix.PubSub) + TenantBroadcaster.pubsub_direct_broadcast(node(), tenant_id, @topic, message, Phoenix.PubSub, :broadcast) + TenantBroadcaster.pubsub_direct_broadcast(node, tenant_id, @topic, message, Phoenix.PubSub, :broadcast) - assert_receive {:other_process, ^message} + assert_receive ^message - # Remote node received the broadcast - assert_receive {:relay, ^node, ^message} + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} - assert_receive { - :telemetry, - [:realtime, :tenants, :payload, :size], - %{size: 114}, - %{tenant: "realtime-dev"} - } + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 130}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end - # This process does not receive the message - refute_receive _any + test "pubsub_direct_broadcast string payload", %{node: node, tenant_id: tenant_id} do + message = %Broadcast{topic: @topic, event: "an event", payload: "some text payload"} + + 
TenantBroadcaster.pubsub_direct_broadcast(node(), tenant_id, @topic, message, Phoenix.PubSub, :broadcast) + TenantBroadcaster.pubsub_direct_broadcast(node, tenant_id, @topic, message, Phoenix.PubSub, :broadcast) + + assert_receive ^message + + # Remote node received the broadcast + assert_receive {:relay, ^node, ^message} + + assert_receive { + :telemetry, + [:realtime, :tenants, :payload, :size], + %{size: 119}, + %{tenant: ^tenant_id, message_type: :broadcast} + } + end end end - def handle_telemetry(event, measures, metadata, pid: pid), do: send(pid, {:telemetry, event, measures, metadata}) + describe "collect_payload_size/3" do + @describetag pubsub_adapter: :gen_rpc + + test "emit telemetry for struct", %{tenant_id: tenant_id} do + TenantBroadcaster.collect_payload_size( + tenant_id, + %Phoenix.Socket.Broadcast{event: "broadcast", payload: %{"a" => "b"}}, + :broadcast + ) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 65}, + %{tenant: ^tenant_id, message_type: :broadcast}} + end + + test "emit telemetry for map", %{tenant_id: tenant_id} do + TenantBroadcaster.collect_payload_size( + tenant_id, + %{event: "broadcast", payload: %{"a" => "b"}}, + :postgres_changes + ) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 53}, + %{tenant: ^tenant_id, message_type: :postgres_changes}} + end + + test "emit telemetry for non-map", %{tenant_id: tenant_id} do + TenantBroadcaster.collect_payload_size(tenant_id, "some blob", :presence) + + assert_receive {:telemetry, [:realtime, :tenants, :payload, :size], %{size: 15}, + %{tenant: ^tenant_id, message_type: :presence}} + end + end + + def handle_telemetry(event, measures, metadata, %{pid: pid, tenant: tenant}) do + if metadata[:tenant] == tenant do + send(pid, {:telemetry, event, measures, metadata}) + end + end end diff --git a/test/support/clustered.ex b/test/support/clustered.ex index c7028b79b..f0caa6df0 100644 --- a/test/support/clustered.ex +++ 
b/test/support/clustered.ex @@ -39,6 +39,7 @@ defmodule Clustered do def start_disconnected(aux_mod \\ nil, opts \\ []) do extra_config = Keyword.get(opts, :extra_config, []) phoenix_port = Keyword.get(opts, :phoenix_port, 4012) + name = Keyword.get(opts, :name, :peer.random_name()) :ok = case :net_kernel.start([:"main@127.0.0.1"]) do @@ -53,7 +54,6 @@ defmodule Clustered do end true = :erlang.set_cookie(:cookie) - name = :peer.random_name() {:ok, pid, node} = ExUnit.Callbacks.start_supervised(%{ diff --git a/test/support/containers.ex b/test/support/containers.ex index cd66f2699..51f923472 100644 --- a/test/support/containers.ex +++ b/test/support/containers.ex @@ -3,7 +3,6 @@ defmodule Containers do alias Realtime.Tenants.Connect alias Containers.Container alias Realtime.Database - alias Realtime.RateCounter alias Realtime.Tenants.Migrations use GenServer @@ -37,7 +36,13 @@ defmodule Containers do def handle_continue({:pool, max_cases}, state) do {:ok, _pid} = :poolboy.start_link( - [name: {:local, Containers.Pool}, size: max_cases + 2, max_overflow: 0, worker_module: Containers.Container], + [ + strategy: :fifo, + name: {:local, Containers.Pool}, + size: max_cases + 2, + max_overflow: 0, + worker_module: Containers.Container + ], [] ) @@ -110,11 +115,25 @@ defmodule Containers do end end + defp storage_up!(tenant) do + settings = + Database.from_tenant(tenant, "realtime_test", :stop) + |> Map.from_struct() + |> Keyword.new() + + case Ecto.Adapters.Postgres.storage_up(settings) do + :ok -> :ok + {:error, :already_up} -> :ok + _ -> raise "Failed to create database" + end + end + # Might be worth changing this to {:ok, tenant} def checkout_tenant(opts \\ []) do with container when is_pid(container) <- :poolboy.checkout(Containers.Pool, true, 5_000), port <- Container.port(container) do tenant = Generators.tenant_fixture(%{port: port, migrations_ran: 0}) + run_migrations? 
= Keyword.get(opts, :run_migrations, false) settings = Database.from_tenant(tenant, "realtime_test", :stop) @@ -126,9 +145,9 @@ defmodule Containers do Postgrex.query!(db_conn, "CREATE SCHEMA IF NOT EXISTS realtime", []) end) - Process.exit(conn, :normal) + storage_up!(tenant) - RateCounter.stop(tenant.external_id) + RateCounterHelper.stop(tenant.external_id) # Automatically checkin the container at the end of the test ExUnit.Callbacks.on_exit(fn -> @@ -149,6 +168,60 @@ defmodule Containers do :poolboy.checkin(Containers.Pool, container) end) + publication = "supabase_realtime_test" + + Postgrex.transaction(conn, fn db_conn -> + queries = [ + "DROP TABLE IF EXISTS public.test", + "DROP PUBLICATION IF EXISTS #{publication}", + "create sequence if not exists test_id_seq;", + """ + create table "public"."test" ( + "id" int4 not null default nextval('test_id_seq'::regclass), + "details" text, + primary key ("id")); + """, + "grant all on table public.test to anon;", + "grant all on table public.test to postgres;", + "grant all on table public.test to authenticated;", + "create publication #{publication} for all tables", + # Clean up all replication slots + """ + DO $$ + DECLARE + r RECORD; + BEGIN + FOR r IN + SELECT slot_name, active_pid + FROM pg_replication_slots + WHERE slot_name LIKE 'supabase_realtime%' + LOOP + IF r.active_pid IS NOT NULL THEN + BEGIN + -- try to terminate the backend; ignore any error or race + SELECT pg_terminate_backend(r.active_pid); + PERFORM pg_sleep(0.5); + EXCEPTION WHEN OTHERS THEN + NULL; + END; + END IF; + + BEGIN + -- check existence then try to drop; ignore any error or race + IF EXISTS (SELECT 1 FROM pg_replication_slots WHERE slot_name = r.slot_name) THEN + PERFORM pg_drop_replication_slot(r.slot_name); + END IF; + EXCEPTION WHEN OTHERS THEN + NULL; + END; + END LOOP; + END$$; + """ + ] + + Enum.each(queries, &Postgrex.query!(db_conn, &1, [])) + end) + tenant = if run_migrations? 
do case run_migrations(tenant) do @@ -156,16 +229,18 @@ defmodule Containers do # Avoiding to use Tenants.update_migrations_ran/2 because it touches Cachex and it doesn't play well with # Ecto Sandbox :ok = Migrations.create_partitions(conn) - {:ok, tenant} = Realtime.Api.update_tenant(tenant, %{migrations_ran: count}) + {:ok, tenant} = Realtime.Api.update_tenant_by_external_id(tenant.external_id, %{migrations_ran: count}) tenant - _ -> - raise "Faled to run migrations" + error -> + raise "Failed to run migrations: #{inspect(error)}" end else tenant end + GenServer.stop(conn) + tenant else _ -> {:error, "failed to checkout a container"} @@ -267,7 +342,13 @@ defmodule Containers do @image, "postgres", "-c", - "config_file=/etc/postgresql/postgresql.conf" + "config_file=/etc/postgresql/postgresql.conf", + "-c", + "wal_keep_size=32MB", + "-c", + "max_wal_size=32MB", + "-c", + "max_slot_wal_keep_size=32MB" ]) end end diff --git a/test/support/generators.ex b/test/support/generators.ex index 768e3823b..481944772 100644 --- a/test/support/generators.ex +++ b/test/support/generators.ex @@ -283,25 +283,28 @@ defmodule Generators do jwt end - @port 4003 - @serializer Phoenix.Socket.V1.JSONSerializer - - def get_connection( - tenant, - role \\ "anon", - claims \\ %{}, - params \\ %{vsn: "1.0.0", log_level: :warning} - ) do + # default test port + @port 4002 + + def get_connection(tenant, serializer \\ Phoenix.Socket.V1.JSONSerializer, opts \\ []) do + params = Keyword.get(opts, :params, %{log_level: :warning}) + claims = Keyword.get(opts, :claims, %{}) + role = Keyword.get(opts, :role, "anon") + params = Enum.reduce(params, "", fn {k, v}, acc -> "#{acc}&#{k}=#{v}" end) - uri = "#{uri(tenant)}?#{params}" + uri = "#{uri(tenant, serializer)}&#{params}" with {:ok, token} <- token_valid(tenant, role, claims), - {:ok, socket} <- WebsocketClient.connect(self(), uri, @serializer, [{"x-api-key", token}]) do + {:ok, socket} <- WebsocketClient.connect(self(), uri, serializer, 
[{"x-api-key", token}]) do + {socket, token} + end + end + - def uri(tenant, port \\ @port), do: "ws://#{tenant.external_id}.localhost:#{port}/socket/websocket" + def uri(tenant, serializer, port \\ @port), + do: "ws://#{tenant.external_id}.localhost:#{port}/socket/websocket?vsn=#{vsn(serializer)}" + + defp vsn(Phoenix.Socket.V1.JSONSerializer), do: "1.0.0" + defp vsn(RealtimeWeb.Socket.V2Serializer), do: "2.0.0" @spec token_valid(Tenant.t(), binary(), map()) :: {:ok, binary()} def token_valid(tenant, role, claims \\ %{}), do: generate_token(tenant, Map.put(claims, :role, role)) diff --git a/test/support/metrics_helper.ex b/test/support/metrics_helper.ex new file mode 100644 index 000000000..ca31ad91b --- /dev/null +++ b/test/support/metrics_helper.ex @@ -0,0 +1,53 @@ +defmodule MetricsHelper do + @spec search(String.t(), String.t(), map() | keyword() | nil) :: + {:ok, String.t(), map(), String.t()} | {:error, String.t()} + def search(prometheus_metrics, metric_name, expected_tags \\ nil) do + # Escape the metric_name to handle any special regex characters + escaped_name = Regex.escape(metric_name) + regex = ~r/^(?<name>#{escaped_name})\{(?<tags>[^}]+)\}\s+(?<value>\d+(?:\.\d+)?)$/ + + prometheus_metrics + |> IO.iodata_to_binary() + |> String.split("\n", trim: true) + |> Enum.find_value( + nil, + fn item -> + case parse(item, regex, expected_tags) do + {:ok, value} -> value + {:error, _reason} -> false + end + end + ) + |> case do + nil -> nil + number -> String.to_integer(number) + end + end + + defp parse(metric_string, regex, expected_tags) do + case Regex.named_captures(regex, metric_string) do + %{"name" => _name, "tags" => tags_string, "value" => value} -> + tags = parse_tags(tags_string) + + if expected_tags && !matching_tags(tags, expected_tags) do + {:error, "Tags do not match expected tags"} + else + {:ok, value} + end + + nil -> + {:error, "Invalid metric format or metric name mismatch"} + end + end + + defp parse_tags(tags_string) do + ~r/(?<key>[a-zA-Z_][a-zA-Z0-9_]*)="(?<value>[^"]*)"/ + 
|> Regex.scan(tags_string, capture: :all_names) + |> Enum.map(fn [key, value] -> {key, value} end) + |> Map.new() + end + + defp matching_tags(tags, expected_tags) do + Enum.all?(expected_tags, fn {k, v} -> Map.get(tags, to_string(k)) == to_string(v) end) + end +end diff --git a/test/support/rate_counter_helper.ex b/test/support/rate_counter_helper.ex new file mode 100644 index 000000000..660ec422f --- /dev/null +++ b/test/support/rate_counter_helper.ex @@ -0,0 +1,41 @@ +defmodule RateCounterHelper do + alias Realtime.RateCounter + + @spec stop(term()) :: :ok + def stop(tenant_id) do + keys = + Registry.select(Realtime.Registry.Unique, [ + {{{:"$1", :_, {:_, :_, :"$2"}}, :"$3", :_}, [{:==, :"$1", RateCounter}, {:==, :"$2", tenant_id}], [:"$_"]} + ]) + + Enum.each(keys, fn {{_, _, key}, {pid, _}} -> + if Process.alive?(pid), do: GenServer.stop(pid) + Realtime.GenCounter.delete(key) + Cachex.del!(RateCounter, key) + end) + + :ok + end + + @spec tick!(RateCounter.Args.t()) :: RateCounter.t() + def tick!(args) do + [{pid, _}] = Registry.lookup(Realtime.Registry.Unique, {RateCounter, :rate_counter, args.id}) + send(pid, :tick) + {:ok, :sys.get_state(pid)} + end + + def tick_tenant_rate_counters!(tenant_id) do + keys = + Registry.select(Realtime.Registry.Unique, [ + {{{:"$1", :_, {:_, :_, :"$2"}}, :"$3", :_}, [{:==, :"$1", RateCounter}, {:==, :"$2", tenant_id}], [:"$_"]} + ]) + + Enum.each(keys, fn {{_, _, _key}, {pid, _}} -> + send(pid, :tick) + # do a get_state to wait for the tick to be processed + :sys.get_state(pid) + end) + + :ok + end +end diff --git a/test/support/tenant_connection.ex b/test/support/tenant_connection.ex index ce5956b49..77328bdfc 100644 --- a/test/support/tenant_connection.ex +++ b/test/support/tenant_connection.ex @@ -4,17 +4,17 @@ defmodule TenantConnection do """ alias Realtime.Api.Message alias Realtime.Database - alias Realtime.Repo + alias Realtime.Tenants.Repo alias Realtime.Tenants.Connect alias RealtimeWeb.Endpoint def 
create_message(attrs, conn, opts \\ [mode: :savepoint]) do - channel = Message.changeset(%Message{}, attrs) + message = Message.changeset(%Message{}, attrs) {:ok, result} = Database.transaction(conn, fn transaction_conn -> - with {:ok, %Message{} = channel} <- Repo.insert(transaction_conn, channel, Message, opts) do - channel + with {:ok, %Message{} = message} <- Repo.insert(transaction_conn, message, Message, opts) do + message end end) diff --git a/test/support/test_endpoint.ex b/test/support/test_endpoint.ex deleted file mode 100644 index 67c477153..000000000 --- a/test/support/test_endpoint.ex +++ /dev/null @@ -1,26 +0,0 @@ -defmodule TestEndpoint do - use Phoenix.Endpoint, otp_app: :phoenix - - @session_config store: :cookie, - key: "_hello_key", - signing_salt: "change_me" - - socket("/socket", RealtimeWeb.UserSocket, - websocket: [ - connect_info: [:peer_data, :uri, :x_headers], - fullsweep_after: 20, - max_frame_size: 8_000_000 - ] - ) - - plug(Plug.Session, @session_config) - plug(:fetch_session) - plug(Plug.CSRFProtection) - plug(:put_session) - - defp put_session(conn, _) do - conn - |> put_session(:from_session, "123") - |> send_resp(200, Plug.CSRFProtection.get_csrf_token()) - end -end diff --git a/test/test_helper.exs b/test/test_helper.exs index 435f00ef8..767212e24 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -2,7 +2,7 @@ start_time = :os.system_time(:millisecond) alias Realtime.Api alias Realtime.Database -ExUnit.start(exclude: [:failing], max_cases: 3, capture_log: true) +ExUnit.start(exclude: [:failing], max_cases: 4, capture_log: true) max_cases = ExUnit.configuration()[:max_cases] @@ -15,7 +15,7 @@ end {:ok, _pid} = Containers.start_link(max_cases) -for tenant <- Api.list_tenants(), do: Api.delete_tenant(tenant) +for tenant <- Api.list_tenants(), do: Api.delete_tenant_by_external_id(tenant.external_id) tenant_name = "dev_tenant" tenant = Containers.initialize(tenant_name) @@ -46,17 +46,18 @@ end) 
Ecto.Adapters.SQL.Sandbox.mode(Realtime.Repo, :manual) -end_time = :os.system_time(:millisecond) -IO.puts("[test_helper.exs] Time to start tests: #{end_time - start_time} ms") - Mimic.copy(:syn) +Mimic.copy(Extensions.PostgresCdcRls.Replications) +Mimic.copy(Extensions.PostgresCdcRls.Subscriptions) +Mimic.copy(Realtime.Database) Mimic.copy(Realtime.GenCounter) +Mimic.copy(Realtime.GenRpc) Mimic.copy(Realtime.Nodes) +Mimic.copy(Realtime.Repo.Replica) Mimic.copy(Realtime.RateCounter) Mimic.copy(Realtime.Tenants.Authorization) Mimic.copy(Realtime.Tenants.Cache) Mimic.copy(Realtime.Tenants.Connect) -Mimic.copy(Realtime.Database) Mimic.copy(Realtime.Tenants.Migrations) Mimic.copy(Realtime.Tenants.Rebalancer) Mimic.copy(Realtime.Tenants.ReplicationConnection) @@ -64,3 +65,13 @@ Mimic.copy(RealtimeWeb.ChannelsAuthorization) Mimic.copy(RealtimeWeb.Endpoint) Mimic.copy(RealtimeWeb.JwtVerification) Mimic.copy(RealtimeWeb.TenantBroadcaster) + +# Set the node as the name we use on Clustered.start +# Also update syn metadata to reflect the new name +:net_kernel.start([:"main@127.0.0.1"]) +region = Application.get_env(:realtime, :region) +[{pid, _}] = :syn.members(RegionNodes, region) +:syn.update_member(RegionNodes, region, pid, fn _ -> [node: node()] end) + +end_time = :os.system_time(:millisecond) +IO.puts("[test_helper.exs] Time to start tests: #{end_time - start_time} ms")