Merge pull request 'Add Supabase dependencies and undici types, including configuration files and versions.' () from rvg-frontend into benito

Reviewed-on: 
This commit is contained in:
roberto.viveros 2025-03-03 13:48:48 +00:00
commit dc1669b5e1
248 changed files with 84772 additions and 3 deletions

139
node_modules/.package-lock.json generated vendored Normal file

@@ -0,0 +1,139 @@
{
"name": "Venta.De.Boletos.De.Un.Concierto",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/@supabase/auth-js": {
"version": "2.68.0",
"resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.68.0.tgz",
"integrity": "sha512-odG7nb7aOmZPUXk6SwL2JchSsn36Ppx11i2yWMIc/meUO2B2HK9YwZHPK06utD9Ql9ke7JKDbwGin/8prHKxxQ==",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/functions-js": {
"version": "2.4.4",
"resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.4.4.tgz",
"integrity": "sha512-WL2p6r4AXNGwop7iwvul2BvOtuJ1YQy8EbOd0dhG1oN1q8el/BIRSFCFnWAMM/vJJlHWLi4ad22sKbKr9mvjoA==",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/node-fetch": {
"version": "2.6.15",
"resolved": "https://registry.npmjs.org/@supabase/node-fetch/-/node-fetch-2.6.15.tgz",
"integrity": "sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
}
},
"node_modules/@supabase/postgrest-js": {
"version": "1.19.2",
"resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.19.2.tgz",
"integrity": "sha512-MXRbk4wpwhWl9IN6rIY1mR8uZCCG4MZAEji942ve6nMwIqnBgBnZhZlON6zTTs6fgveMnoCILpZv1+K91jN+ow==",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/realtime-js": {
"version": "2.11.2",
"resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.11.2.tgz",
"integrity": "sha512-u/XeuL2Y0QEhXSoIPZZwR6wMXgB+RQbJzG9VErA3VghVt7uRfSVsjeqd7m5GhX3JR6dM/WRmLbVR8URpDWG4+w==",
"dependencies": {
"@supabase/node-fetch": "^2.6.14",
"@types/phoenix": "^1.5.4",
"@types/ws": "^8.5.10",
"ws": "^8.18.0"
}
},
"node_modules/@supabase/storage-js": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.7.1.tgz",
"integrity": "sha512-asYHcyDR1fKqrMpytAS1zjyEfvxuOIp1CIXX7ji4lHHcJKqyk+sLl/Vxgm4sN6u8zvuUtae9e4kDxQP2qrwWBA==",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/supabase-js": {
"version": "2.49.1",
"resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.49.1.tgz",
"integrity": "sha512-lKaptKQB5/juEF5+jzmBeZlz69MdHZuxf+0f50NwhL+IE//m4ZnOeWlsKRjjsM0fVayZiQKqLvYdBn0RLkhGiQ==",
"dependencies": {
"@supabase/auth-js": "2.68.0",
"@supabase/functions-js": "2.4.4",
"@supabase/node-fetch": "2.6.15",
"@supabase/postgrest-js": "1.19.2",
"@supabase/realtime-js": "2.11.2",
"@supabase/storage-js": "2.7.1"
}
},
"node_modules/@types/node": {
"version": "22.13.8",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.8.tgz",
"integrity": "sha512-G3EfaZS+iOGYWLLRCEAXdWK9my08oHNZ+FHluRiggIYJPOXzhOiDgpVCUHaUvyIC5/fj7C/p637jdzC666AOKQ==",
"dependencies": {
"undici-types": "~6.20.0"
}
},
"node_modules/@types/phoenix": {
"version": "1.6.6",
"resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.6.tgz",
"integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A=="
},
"node_modules/@types/ws": {
"version": "8.5.14",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.14.tgz",
"integrity": "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"node_modules/undici-types": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/ws": {
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz",
"integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}
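
This lockfile pins `@supabase/supabase-js` 2.49.1 along with exact versions of its sub-clients (auth, functions, postgrest, realtime, storage). A minimal sketch of the client these dependencies enable (the URL, key, and table name are placeholders, not values from this repository):
```js
import { createClient } from '@supabase/supabase-js'

// Placeholder credentials; substitute your own project URL and anon key.
const supabase = createClient('https://your-project.supabase.co', 'public-anon-key')

// Hypothetical table name for this ticket-sales project.
const { data, error } = await supabase.from('boletos').select('*')
```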

21
node_modules/@supabase/auth-js/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

50
node_modules/@supabase/auth-js/README.md generated vendored Normal file

@@ -0,0 +1,50 @@
# `auth-js`
An isomorphic JavaScript client library for the [Supabase Auth](https://github.com/supabase/auth) API.
## Docs
- Using `auth-js`: https://supabase.com/docs/reference/javascript/auth-signup
- TypeDoc: https://supabase.github.io/auth-js/v2
## Quick start
Install
```bash
npm install --save @supabase/auth-js
```
Usage
```js
import { AuthClient } from '@supabase/auth-js'
const GOTRUE_URL = 'http://localhost:9999'
const auth = new AuthClient({ url: GOTRUE_URL })
```
- `signUp()`: https://supabase.io/docs/reference/javascript/auth-signup
- `signIn()`: https://supabase.io/docs/reference/javascript/auth-signin
- `signOut()`: https://supabase.io/docs/reference/javascript/auth-signout
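For example, a minimal email/password sign-up with the client created above (the credentials shown are placeholders):
```js
const { data, error } = await auth.signUp({
  email: 'user@example.com',
  password: 'example-password',
})
if (error) console.error(error.message)
```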
### Custom `fetch` implementation
`auth-js` uses the [`cross-fetch`](https://www.npmjs.com/package/cross-fetch) library to make HTTP requests, but an alternative `fetch` implementation can be provided as an option. This is most useful in environments where `cross-fetch` is not compatible, for instance Cloudflare Workers:
```js
import { AuthClient } from '@supabase/auth-js'
const AUTH_URL = 'http://localhost:9999'
const auth = new AuthClient({ url: AUTH_URL, fetch: fetch })
```
## Sponsors
We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist we build them and open source them ourselves.
[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase)
![Watch this repo](https://gitcdn.xyz/repo/supabase/monorepo/master/web/static/watch-repo.gif 'Watch this repo')

69
node_modules/@supabase/auth-js/package.json generated vendored Normal file

@@ -0,0 +1,69 @@
{
"name": "@supabase/auth-js",
"version": "2.68.0",
"private": false,
"description": "Official client library for Supabase Auth",
"keywords": [
"auth",
"supabase",
"auth",
"authentication"
],
"homepage": "https://github.com/supabase/auth-js",
"bugs": "https://github.com/supabase/auth-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/main/index.js",
"module": "dist/module/index.js",
"types": "dist/module/index.d.ts",
"repository": "github:supabase/auth-js",
"scripts": {
"clean": "rimraf dist docs",
"coverage": "echo \"run npm test\"",
"format": "prettier --write \"{src,test}/**/*.ts\"",
"build": "genversion src/lib/version.ts --es6 && run-s clean format build:* && run-s lint",
"build:main": "tsc -p tsconfig.json",
"build:module": "tsc -p tsconfig.module.json",
"lint": "eslint ./src/**/* test/**/*.test.ts",
"test": "run-s test:clean test:infra test:suite test:clean",
"test:suite": "jest --runInBand --coverage",
"test:infra": "cd infra && docker compose down && docker compose pull && docker compose up -d && sleep 30",
"test:clean": "cd infra && docker compose down",
"docs": "typedoc src/index.ts --out docs/v2 --excludePrivate --excludeProtected",
"docs:json": "typedoc --json docs/v2/spec.json --excludeExternals --excludePrivate --excludeProtected src/index.ts"
},
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
},
"devDependencies": {
"@types/faker": "^5.1.6",
"@types/jest": "^28.1.6",
"@types/jsonwebtoken": "^8.5.6",
"@types/node": "^18.16.19",
"@types/node-fetch": "^2.6.4",
"@typescript-eslint/eslint-plugin": "^5.30.7",
"@typescript-eslint/parser": "^5.30.7",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"eslint-config-standard": "^17.0.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^6.0.0",
"faker": "^5.3.1",
"genversion": "^3.1.1",
"jest": "^28.1.3",
"jest-mock-server": "^0.1.0",
"jsonwebtoken": "^9.0.0",
"npm-run-all": "^4.1.5",
"prettier": "2.7.1",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"ts-jest": "^28.0.7",
"typedoc": "^0.22.16",
"typescript": "^4.7.4"
}
}

5
node_modules/@supabase/auth-js/src/AuthAdminApi.ts generated vendored Normal file

@@ -0,0 +1,5 @@
import GoTrueAdminApi from './GoTrueAdminApi'
const AuthAdminApi = GoTrueAdminApi
export default AuthAdminApi

5
node_modules/@supabase/auth-js/src/AuthClient.ts generated vendored Normal file

@@ -0,0 +1,5 @@
import GoTrueClient from './GoTrueClient'
const AuthClient = GoTrueClient
export default AuthClient

333
node_modules/@supabase/auth-js/src/GoTrueAdminApi.ts generated vendored Normal file

@@ -0,0 +1,333 @@
import {
Fetch,
_generateLinkResponse,
_noResolveJsonResponse,
_request,
_userResponse,
} from './lib/fetch'
import { resolveFetch } from './lib/helpers'
import {
AdminUserAttributes,
GenerateLinkParams,
GenerateLinkResponse,
Pagination,
User,
UserResponse,
GoTrueAdminMFAApi,
AuthMFAAdminDeleteFactorParams,
AuthMFAAdminDeleteFactorResponse,
AuthMFAAdminListFactorsParams,
AuthMFAAdminListFactorsResponse,
PageParams,
} from './lib/types'
import { AuthError, isAuthError } from './lib/errors'
export default class GoTrueAdminApi {
/** Contains all MFA administration methods. */
mfa: GoTrueAdminMFAApi
protected url: string
protected headers: {
[key: string]: string
}
protected fetch: Fetch
constructor({
url = '',
headers = {},
fetch,
}: {
url: string
headers?: {
[key: string]: string
}
fetch?: Fetch
}) {
this.url = url
this.headers = headers
this.fetch = resolveFetch(fetch)
this.mfa = {
listFactors: this._listFactors.bind(this),
deleteFactor: this._deleteFactor.bind(this),
}
}
/**
* Removes a logged-in session.
* @param jwt A valid, logged-in JWT.
* @param scope The logout scope.
*/
async signOut(
jwt: string,
scope: 'global' | 'local' | 'others' = 'global'
): Promise<{ data: null; error: AuthError | null }> {
try {
await _request(this.fetch, 'POST', `${this.url}/logout?scope=${scope}`, {
headers: this.headers,
jwt,
noResolveJson: true,
})
return { data: null, error: null }
} catch (error) {
if (isAuthError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Sends an invite link to an email address.
* @param email The email address of the user.
* @param options Additional options to be included when inviting.
*/
async inviteUserByEmail(
email: string,
options: {
/** A custom data object to store additional metadata about the user. This maps to the `auth.users.user_metadata` column. */
data?: object
/** The URL which will be appended to the email link sent to the user's email address. Once clicked the user will end up on this URL. */
redirectTo?: string
} = {}
): Promise<UserResponse> {
try {
return await _request(this.fetch, 'POST', `${this.url}/invite`, {
body: { email, data: options.data },
headers: this.headers,
redirectTo: options.redirectTo,
xform: _userResponse,
})
} catch (error) {
if (isAuthError(error)) {
return { data: { user: null }, error }
}
throw error
}
}
/**
* Generates email links and OTPs to be sent via a custom email provider.
* @param email The user's email.
* @param options.password User password. For signup only.
* @param options.data Optional user metadata. For signup only.
* @param options.redirectTo The redirect url which should be appended to the generated link
*/
async generateLink(params: GenerateLinkParams): Promise<GenerateLinkResponse> {
try {
const { options, ...rest } = params
const body: any = { ...rest, ...options }
if ('newEmail' in rest) {
// replace newEmail with new_email in request body
body.new_email = rest?.newEmail
delete body['newEmail']
}
return await _request(this.fetch, 'POST', `${this.url}/admin/generate_link`, {
body: body,
headers: this.headers,
xform: _generateLinkResponse,
redirectTo: options?.redirectTo,
})
} catch (error) {
if (isAuthError(error)) {
return {
data: {
properties: null,
user: null,
},
error,
}
}
throw error
}
}
// User Admin API
/**
* Creates a new user.
* This function should only be called on a server. Never expose your `service_role` key in the browser.
*/
async createUser(attributes: AdminUserAttributes): Promise<UserResponse> {
try {
return await _request(this.fetch, 'POST', `${this.url}/admin/users`, {
body: attributes,
headers: this.headers,
xform: _userResponse,
})
} catch (error) {
if (isAuthError(error)) {
return { data: { user: null }, error }
}
throw error
}
}
/**
* Get a list of users.
*
* This function should only be called on a server. Never expose your `service_role` key in the browser.
* @param params An object which supports `page` and `perPage` as numbers, to alter the paginated results.
*/
async listUsers(
params?: PageParams
): Promise<
| { data: { users: User[]; aud: string } & Pagination; error: null }
| { data: { users: [] }; error: AuthError }
> {
try {
const pagination: Pagination = { nextPage: null, lastPage: 0, total: 0 }
const response = await _request(this.fetch, 'GET', `${this.url}/admin/users`, {
headers: this.headers,
noResolveJson: true,
query: {
page: params?.page?.toString() ?? '',
per_page: params?.perPage?.toString() ?? '',
},
xform: _noResolveJsonResponse,
})
if (response.error) throw response.error
const users = await response.json()
const total = response.headers.get('x-total-count') ?? 0
const links = response.headers.get('link')?.split(',') ?? []
if (links.length > 0) {
links.forEach((link: string) => {
const page = parseInt(link.split(';')[0].split('=')[1].substring(0, 1))
const rel = JSON.parse(link.split(';')[1].split('=')[1])
pagination[`${rel}Page`] = page
})
pagination.total = parseInt(total)
}
return { data: { ...users, ...pagination }, error: null }
} catch (error) {
if (isAuthError(error)) {
return { data: { users: [] }, error }
}
throw error
}
}
/**
* Get user by id.
*
* @param uid The user's unique identifier
*
* This function should only be called on a server. Never expose your `service_role` key in the browser.
*/
async getUserById(uid: string): Promise<UserResponse> {
try {
return await _request(this.fetch, 'GET', `${this.url}/admin/users/${uid}`, {
headers: this.headers,
xform: _userResponse,
})
} catch (error) {
if (isAuthError(error)) {
return { data: { user: null }, error }
}
throw error
}
}
/**
* Updates the user data.
*
* @param attributes The data you want to update.
*
* This function should only be called on a server. Never expose your `service_role` key in the browser.
*/
async updateUserById(uid: string, attributes: AdminUserAttributes): Promise<UserResponse> {
try {
return await _request(this.fetch, 'PUT', `${this.url}/admin/users/${uid}`, {
body: attributes,
headers: this.headers,
xform: _userResponse,
})
} catch (error) {
if (isAuthError(error)) {
return { data: { user: null }, error }
}
throw error
}
}
/**
* Delete a user. Requires a `service_role` key.
*
* @param id The user id you want to remove.
* @param shouldSoftDelete If true, then the user will be soft-deleted from the auth schema. Soft deletion allows user identification from the hashed user ID but is not reversible.
* Defaults to false for backward compatibility.
*
* This function should only be called on a server. Never expose your `service_role` key in the browser.
*/
async deleteUser(id: string, shouldSoftDelete = false): Promise<UserResponse> {
try {
return await _request(this.fetch, 'DELETE', `${this.url}/admin/users/${id}`, {
headers: this.headers,
body: {
should_soft_delete: shouldSoftDelete,
},
xform: _userResponse,
})
} catch (error) {
if (isAuthError(error)) {
return { data: { user: null }, error }
}
throw error
}
}
private async _listFactors(
params: AuthMFAAdminListFactorsParams
): Promise<AuthMFAAdminListFactorsResponse> {
try {
const { data, error } = await _request(
this.fetch,
'GET',
`${this.url}/admin/users/${params.userId}/factors`,
{
headers: this.headers,
xform: (factors: any) => {
return { data: { factors }, error: null }
},
}
)
return { data, error }
} catch (error) {
if (isAuthError(error)) {
return { data: null, error }
}
throw error
}
}
private async _deleteFactor(
params: AuthMFAAdminDeleteFactorParams
): Promise<AuthMFAAdminDeleteFactorResponse> {
try {
const data = await _request(
this.fetch,
'DELETE',
`${this.url}/admin/users/${params.userId}/factors/${params.id}`,
{
headers: this.headers,
}
)
return { data, error: null }
} catch (error) {
if (isAuthError(error)) {
return { data: null, error }
}
throw error
}
}
}

2602
node_modules/@supabase/auth-js/src/GoTrueClient.ts generated vendored Normal file

File diff suppressed because it is too large

12
node_modules/@supabase/auth-js/src/index.ts generated vendored Normal file

@@ -0,0 +1,12 @@
import GoTrueAdminApi from './GoTrueAdminApi'
import GoTrueClient from './GoTrueClient'
import AuthAdminApi from './AuthAdminApi'
import AuthClient from './AuthClient'
export { GoTrueAdminApi, GoTrueClient, AuthAdminApi, AuthClient }
export * from './lib/types'
export * from './lib/errors'
export {
navigatorLock,
NavigatorLockAcquireTimeoutError,
internals as lockInternals,
} from './lib/locks'

30
node_modules/@supabase/auth-js/src/lib/constants.ts generated vendored Normal file

@@ -0,0 +1,30 @@
import { version } from './version'
/** Current session will be checked for refresh at this interval. */
export const AUTO_REFRESH_TICK_DURATION_MS = 30 * 1000
/**
* A token refresh will be attempted this many ticks before the current session expires. */
export const AUTO_REFRESH_TICK_THRESHOLD = 3
/*
* Earliest time before an access token expires that the session should be refreshed.
*/
export const EXPIRY_MARGIN_MS = AUTO_REFRESH_TICK_THRESHOLD * AUTO_REFRESH_TICK_DURATION_MS
export const GOTRUE_URL = 'http://localhost:9999'
export const STORAGE_KEY = 'supabase.auth.token'
export const AUDIENCE = ''
export const DEFAULT_HEADERS = { 'X-Client-Info': `gotrue-js/${version}` }
export const NETWORK_FAILURE = {
MAX_RETRIES: 10,
RETRY_INTERVAL: 2, // in deciseconds
}
export const API_VERSION_HEADER_NAME = 'X-Supabase-Api-Version'
export const API_VERSIONS = {
'2024-01-01': {
timestamp: Date.parse('2024-01-01T00:00:00.0Z'),
name: '2024-01-01',
},
}
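
Taken together, these constants mean the auto-refresh timer ticks every 30 s and a refresh is attempted once the session is within three ticks (90 s) of expiry. A sketch of that decision, assuming a session object carrying an `expires_at` field in epoch seconds (illustrative, not the library's exact refresh loop):
```js
// EXPIRY_MARGIN_MS = 3 ticks * 30_000 ms = 90_000 ms
function shouldRefresh(session, nowMs = Date.now()) {
  // `expires_at` is in epoch seconds, as issued by GoTrue.
  return session.expires_at * 1000 - nowMs < EXPIRY_MARGIN_MS
}
```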

90
node_modules/@supabase/auth-js/src/lib/error-codes.ts generated vendored Normal file

@@ -0,0 +1,90 @@
/**
* Known error codes. Note that the server may also return other error codes
* not included in this list (if the client library is older than the version
* on the server).
*/
export type ErrorCode =
| 'unexpected_failure'
| 'validation_failed'
| 'bad_json'
| 'email_exists'
| 'phone_exists'
| 'bad_jwt'
| 'not_admin'
| 'no_authorization'
| 'user_not_found'
| 'session_not_found'
| 'session_expired'
| 'refresh_token_not_found'
| 'refresh_token_already_used'
| 'flow_state_not_found'
| 'flow_state_expired'
| 'signup_disabled'
| 'user_banned'
| 'provider_email_needs_verification'
| 'invite_not_found'
| 'bad_oauth_state'
| 'bad_oauth_callback'
| 'oauth_provider_not_supported'
| 'unexpected_audience'
| 'single_identity_not_deletable'
| 'email_conflict_identity_not_deletable'
| 'identity_already_exists'
| 'email_provider_disabled'
| 'phone_provider_disabled'
| 'too_many_enrolled_mfa_factors'
| 'mfa_factor_name_conflict'
| 'mfa_factor_not_found'
| 'mfa_ip_address_mismatch'
| 'mfa_challenge_expired'
| 'mfa_verification_failed'
| 'mfa_verification_rejected'
| 'insufficient_aal'
| 'captcha_failed'
| 'saml_provider_disabled'
| 'manual_linking_disabled'
| 'sms_send_failed'
| 'email_not_confirmed'
| 'phone_not_confirmed'
| 'reauth_nonce_missing'
| 'saml_relay_state_not_found'
| 'saml_relay_state_expired'
| 'saml_idp_not_found'
| 'saml_assertion_no_user_id'
| 'saml_assertion_no_email'
| 'user_already_exists'
| 'sso_provider_not_found'
| 'saml_metadata_fetch_failed'
| 'saml_idp_already_exists'
| 'sso_domain_already_exists'
| 'saml_entity_id_mismatch'
| 'conflict'
| 'provider_disabled'
| 'user_sso_managed'
| 'reauthentication_needed'
| 'same_password'
| 'reauthentication_not_valid'
| 'otp_expired'
| 'otp_disabled'
| 'identity_not_found'
| 'weak_password'
| 'over_request_rate_limit'
| 'over_email_send_rate_limit'
| 'over_sms_send_rate_limit'
| 'bad_code_verifier'
| 'anonymous_provider_disabled'
| 'hook_timeout'
| 'hook_timeout_after_retry'
| 'hook_payload_over_size_limit'
| 'hook_payload_invalid_content_type'
| 'request_timeout'
| 'mfa_phone_enroll_not_enabled'
| 'mfa_phone_verify_not_enabled'
| 'mfa_totp_enroll_not_enabled'
| 'mfa_totp_verify_not_enabled'
| 'mfa_webauthn_enroll_not_enabled'
| 'mfa_webauthn_verify_not_enabled'
| 'mfa_verified_factor_exists'
| 'invalid_credentials'
| 'email_address_not_authorized'
| 'email_address_invalid'

159
node_modules/@supabase/auth-js/src/lib/errors.ts generated vendored Normal file

@@ -0,0 +1,159 @@
import { WeakPasswordReasons } from './types'
import { ErrorCode } from './error-codes'
export class AuthError extends Error {
/**
* Error code associated with the error. Most errors coming from
* HTTP responses will have a code, though some errors that occur
* before a response is received will not have one present. In that
* case {@link #status} will also be undefined.
*/
code: ErrorCode | (string & {}) | undefined
/** HTTP status code that caused the error. */
status: number | undefined
protected __isAuthError = true
constructor(message: string, status?: number, code?: string) {
super(message)
this.name = 'AuthError'
this.status = status
this.code = code
}
}
export function isAuthError(error: unknown): error is AuthError {
return typeof error === 'object' && error !== null && '__isAuthError' in error
}
export class AuthApiError extends AuthError {
status: number
constructor(message: string, status: number, code: string | undefined) {
super(message, status, code)
this.name = 'AuthApiError'
this.status = status
this.code = code
}
}
export function isAuthApiError(error: unknown): error is AuthApiError {
return isAuthError(error) && error.name === 'AuthApiError'
}
export class AuthUnknownError extends AuthError {
originalError: unknown
constructor(message: string, originalError: unknown) {
super(message)
this.name = 'AuthUnknownError'
this.originalError = originalError
}
}
export class CustomAuthError extends AuthError {
name: string
status: number
constructor(message: string, name: string, status: number, code: string | undefined) {
super(message, status, code)
this.name = name
this.status = status
}
}
export class AuthSessionMissingError extends CustomAuthError {
constructor() {
super('Auth session missing!', 'AuthSessionMissingError', 400, undefined)
}
}
export function isAuthSessionMissingError(error: any): error is AuthSessionMissingError {
return isAuthError(error) && error.name === 'AuthSessionMissingError'
}
export class AuthInvalidTokenResponseError extends CustomAuthError {
constructor() {
super('Auth session or user missing', 'AuthInvalidTokenResponseError', 500, undefined)
}
}
export class AuthInvalidCredentialsError extends CustomAuthError {
constructor(message: string) {
super(message, 'AuthInvalidCredentialsError', 400, undefined)
}
}
export class AuthImplicitGrantRedirectError extends CustomAuthError {
details: { error: string; code: string } | null = null
constructor(message: string, details: { error: string; code: string } | null = null) {
super(message, 'AuthImplicitGrantRedirectError', 500, undefined)
this.details = details
}
toJSON() {
return {
name: this.name,
message: this.message,
status: this.status,
details: this.details,
}
}
}
export function isAuthImplicitGrantRedirectError(
error: any
): error is AuthImplicitGrantRedirectError {
return isAuthError(error) && error.name === 'AuthImplicitGrantRedirectError'
}
export class AuthPKCEGrantCodeExchangeError extends CustomAuthError {
details: { error: string; code: string } | null = null
constructor(message: string, details: { error: string; code: string } | null = null) {
super(message, 'AuthPKCEGrantCodeExchangeError', 500, undefined)
this.details = details
}
toJSON() {
return {
name: this.name,
message: this.message,
status: this.status,
details: this.details,
}
}
}
export class AuthRetryableFetchError extends CustomAuthError {
constructor(message: string, status: number) {
super(message, 'AuthRetryableFetchError', status, undefined)
}
}
export function isAuthRetryableFetchError(error: unknown): error is AuthRetryableFetchError {
return isAuthError(error) && error.name === 'AuthRetryableFetchError'
}
/**
* This error is thrown on certain methods when the password used is deemed
* weak. Inspect the reasons to identify what password strength rules are
* inadequate.
*/
export class AuthWeakPasswordError extends CustomAuthError {
/**
* Reasons why the password is deemed weak.
*/
reasons: WeakPasswordReasons[]
constructor(message: string, status: number, reasons: string[]) {
super(message, 'AuthWeakPasswordError', status, 'weak_password')
this.reasons = reasons
}
}
export function isAuthWeakPasswordError(error: unknown): error is AuthWeakPasswordError {
return isAuthError(error) && error.name === 'AuthWeakPasswordError'
}
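
A typical way to consume these type guards when a call throws instead of returning `{ data, error }` (a sketch; both guards are re-exported from the package root via `export * from './lib/errors'`):
```js
import { isAuthApiError, isAuthRetryableFetchError } from '@supabase/auth-js'

try {
  // ...some auth call that may throw...
} catch (e) {
  if (isAuthApiError(e)) {
    console.error('API error:', e.status, e.code)
  } else if (isAuthRetryableFetchError(e)) {
    console.warn('Network hiccup, safe to retry')
  } else {
    throw e
  }
}
```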

283
node_modules/@supabase/auth-js/src/lib/fetch.ts generated vendored Normal file

@@ -0,0 +1,283 @@
import { API_VERSIONS, API_VERSION_HEADER_NAME } from './constants'
import { expiresAt, looksLikeFetchResponse, parseResponseAPIVersion } from './helpers'
import {
AuthResponse,
AuthResponsePassword,
SSOResponse,
GenerateLinkProperties,
GenerateLinkResponse,
User,
UserResponse,
} from './types'
import {
AuthApiError,
AuthRetryableFetchError,
AuthWeakPasswordError,
AuthUnknownError,
AuthSessionMissingError,
} from './errors'
export type Fetch = typeof fetch
export interface FetchOptions {
headers?: {
[key: string]: string
}
noResolveJson?: boolean
}
export interface FetchParameters {
signal?: AbortSignal
}
export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE'
const _getErrorMessage = (err: any): string =>
err.msg || err.message || err.error_description || err.error || JSON.stringify(err)
const NETWORK_ERROR_CODES = [502, 503, 504]
export async function handleError(error: unknown) {
if (!looksLikeFetchResponse(error)) {
throw new AuthRetryableFetchError(_getErrorMessage(error), 0)
}
if (NETWORK_ERROR_CODES.includes(error.status)) {
// 502, 503 or 504: the server had a transient error and the request may be retried.
throw new AuthRetryableFetchError(_getErrorMessage(error), error.status)
}
let data: any
try {
data = await error.json()
} catch (e: any) {
throw new AuthUnknownError(_getErrorMessage(e), e)
}
let errorCode: string | undefined = undefined
const responseAPIVersion = parseResponseAPIVersion(error)
if (
responseAPIVersion &&
responseAPIVersion.getTime() >= API_VERSIONS['2024-01-01'].timestamp &&
typeof data === 'object' &&
data &&
typeof data.code === 'string'
) {
errorCode = data.code
} else if (typeof data === 'object' && data && typeof data.error_code === 'string') {
errorCode = data.error_code
}
if (!errorCode) {
// Legacy support for weak password errors, when there were no error codes
if (
typeof data === 'object' &&
data &&
typeof data.weak_password === 'object' &&
data.weak_password &&
Array.isArray(data.weak_password.reasons) &&
data.weak_password.reasons.length &&
data.weak_password.reasons.reduce((a: boolean, i: any) => a && typeof i === 'string', true)
) {
throw new AuthWeakPasswordError(
_getErrorMessage(data),
error.status,
data.weak_password.reasons
)
}
} else if (errorCode === 'weak_password') {
throw new AuthWeakPasswordError(
_getErrorMessage(data),
error.status,
data.weak_password?.reasons || []
)
} else if (errorCode === 'session_not_found') {
// The `session_id` inside the JWT does not correspond to a row in the
// `sessions` table. This usually means the user has signed out, has been
// deleted, or their session has somehow been terminated.
throw new AuthSessionMissingError()
}
throw new AuthApiError(_getErrorMessage(data), error.status || 500, errorCode)
}
const _getRequestParams = (
method: RequestMethodType,
options?: FetchOptions,
parameters?: FetchParameters,
body?: object
) => {
const params: { [k: string]: any } = { method, headers: options?.headers || {} }
if (method === 'GET') {
return params
}
params.headers = { 'Content-Type': 'application/json;charset=UTF-8', ...options?.headers }
params.body = JSON.stringify(body)
return { ...params, ...parameters }
}
interface GotrueRequestOptions extends FetchOptions {
jwt?: string
redirectTo?: string
body?: object
query?: { [key: string]: string }
/**
* Function that transforms api response from gotrue into a desirable / standardised format
*/
xform?: (data: any) => any
}
export async function _request(
fetcher: Fetch,
method: RequestMethodType,
url: string,
options?: GotrueRequestOptions
) {
const headers = {
...options?.headers,
}
if (!headers[API_VERSION_HEADER_NAME]) {
headers[API_VERSION_HEADER_NAME] = API_VERSIONS['2024-01-01'].name
}
if (options?.jwt) {
headers['Authorization'] = `Bearer ${options.jwt}`
}
const qs = options?.query ?? {}
if (options?.redirectTo) {
qs['redirect_to'] = options.redirectTo
}
const queryString = Object.keys(qs).length ? '?' + new URLSearchParams(qs).toString() : ''
const data = await _handleRequest(
fetcher,
method,
url + queryString,
{
headers,
noResolveJson: options?.noResolveJson,
},
{},
options?.body
)
return options?.xform ? options?.xform(data) : { data: { ...data }, error: null }
}
async function _handleRequest(
fetcher: Fetch,
method: RequestMethodType,
url: string,
options?: FetchOptions,
parameters?: FetchParameters,
body?: object
): Promise<any> {
const requestParams = _getRequestParams(method, options, parameters, body)
let result: any
try {
result = await fetcher(url, {
...requestParams,
})
} catch (e) {
console.error(e)
// fetch failed, likely due to a network or CORS error
throw new AuthRetryableFetchError(_getErrorMessage(e), 0)
}
if (!result.ok) {
await handleError(result)
}
if (options?.noResolveJson) {
return result
}
try {
return await result.json()
} catch (e: any) {
await handleError(e)
}
}
export function _sessionResponse(data: any): AuthResponse {
let session = null
if (hasSession(data)) {
session = { ...data }
if (!data.expires_at) {
session.expires_at = expiresAt(data.expires_in)
}
}
const user: User = data.user ?? (data as User)
return { data: { session, user }, error: null }
}
export function _sessionResponsePassword(data: any): AuthResponsePassword {
const response = _sessionResponse(data) as AuthResponsePassword
if (
!response.error &&
data.weak_password &&
typeof data.weak_password === 'object' &&
Array.isArray(data.weak_password.reasons) &&
data.weak_password.reasons.length &&
data.weak_password.message &&
typeof data.weak_password.message === 'string' &&
data.weak_password.reasons.reduce((a: boolean, i: any) => a && typeof i === 'string', true)
) {
response.data.weak_password = data.weak_password
}
return response
}
export function _userResponse(data: any): UserResponse {
const user: User = data.user ?? (data as User)
return { data: { user }, error: null }
}
export function _ssoResponse(data: any): SSOResponse {
return { data, error: null }
}
export function _generateLinkResponse(data: any): GenerateLinkResponse {
const { action_link, email_otp, hashed_token, redirect_to, verification_type, ...rest } = data
const properties: GenerateLinkProperties = {
action_link,
email_otp,
hashed_token,
redirect_to,
verification_type,
}
const user: User = { ...rest }
return {
data: {
properties,
user,
},
error: null,
}
}
export function _noResolveJsonResponse(data: any): Response {
return data
}
/**
* hasSession checks if the response object contains a valid session
* @param data A response object
* @returns true if a session is in the response
*/
function hasSession(data: any): boolean {
return data.access_token && data.refresh_token && data.expires_in
}

346
node_modules/@supabase/auth-js/src/lib/helpers.ts generated vendored Normal file

@@ -0,0 +1,346 @@
import { API_VERSION_HEADER_NAME } from './constants'
import { SupportedStorage } from './types'
export function expiresAt(expiresIn: number) {
const timeNow = Math.round(Date.now() / 1000)
return timeNow + expiresIn
}
export function uuid() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = (Math.random() * 16) | 0,
v = c == 'x' ? r : (r & 0x3) | 0x8
return v.toString(16)
})
}
export const isBrowser = () => typeof window !== 'undefined' && typeof document !== 'undefined'
const localStorageWriteTests = {
tested: false,
writable: false,
}
/**
* Checks whether localStorage is supported on this browser.
*/
export const supportsLocalStorage = () => {
if (!isBrowser()) {
return false
}
try {
if (typeof globalThis.localStorage !== 'object') {
return false
}
} catch (e) {
// DOM exception when accessing `localStorage`
return false
}
if (localStorageWriteTests.tested) {
return localStorageWriteTests.writable
}
const randomKey = `lswt-${Math.random()}${Math.random()}`
try {
globalThis.localStorage.setItem(randomKey, randomKey)
globalThis.localStorage.removeItem(randomKey)
localStorageWriteTests.tested = true
localStorageWriteTests.writable = true
} catch (e) {
// localStorage can't be written to
// https://www.chromium.org/for-testers/bug-reporting-guidelines/uncaught-securityerror-failed-to-read-the-localstorage-property-from-window-access-is-denied-for-this-document
localStorageWriteTests.tested = true
localStorageWriteTests.writable = false
}
return localStorageWriteTests.writable
}
/**
* Extracts parameters encoded in the URL both in the query and fragment.
*/
export function parseParametersFromURL(href: string) {
const result: { [parameter: string]: string } = {}
const url = new URL(href)
if (url.hash && url.hash[0] === '#') {
try {
const hashSearchParams = new URLSearchParams(url.hash.substring(1))
hashSearchParams.forEach((value, key) => {
result[key] = value
})
} catch (e: any) {
// hash is not a query string
}
}
// search parameters take precedence over hash parameters
url.searchParams.forEach((value, key) => {
result[key] = value
})
return result
}
type Fetch = typeof fetch
export const resolveFetch = (customFetch?: Fetch): Fetch => {
let _fetch: Fetch
if (customFetch) {
_fetch = customFetch
} else if (typeof fetch === 'undefined') {
_fetch = (...args) =>
import('@supabase/node-fetch' as any).then(({ default: fetch }) => fetch(...args))
} else {
_fetch = fetch
}
return (...args) => _fetch(...args)
}
export const looksLikeFetchResponse = (maybeResponse: unknown): maybeResponse is Response => {
return (
typeof maybeResponse === 'object' &&
maybeResponse !== null &&
'status' in maybeResponse &&
'ok' in maybeResponse &&
'json' in maybeResponse &&
typeof (maybeResponse as any).json === 'function'
)
}
// Storage helpers
export const setItemAsync = async (
storage: SupportedStorage,
key: string,
data: any
): Promise<void> => {
await storage.setItem(key, JSON.stringify(data))
}
export const getItemAsync = async (storage: SupportedStorage, key: string): Promise<unknown> => {
const value = await storage.getItem(key)
if (!value) {
return null
}
try {
return JSON.parse(value)
} catch {
return value
}
}
export const removeItemAsync = async (storage: SupportedStorage, key: string): Promise<void> => {
await storage.removeItem(key)
}
export function decodeBase64URL(value: string): string {
const key = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='
let base64 = ''
let chr1, chr2, chr3
let enc1, enc2, enc3, enc4
let i = 0
value = value.replace('-', '+').replace('_', '/')
while (i < value.length) {
enc1 = key.indexOf(value.charAt(i++))
enc2 = key.indexOf(value.charAt(i++))
enc3 = key.indexOf(value.charAt(i++))
enc4 = key.indexOf(value.charAt(i++))
chr1 = (enc1 << 2) | (enc2 >> 4)
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
chr3 = ((enc3 & 3) << 6) | enc4
base64 = base64 + String.fromCharCode(chr1)
if (enc3 != 64 && chr2 != 0) {
base64 = base64 + String.fromCharCode(chr2)
}
if (enc4 != 64 && chr3 != 0) {
base64 = base64 + String.fromCharCode(chr3)
}
}
return base64
}
/**
* A deferred represents some asynchronous work that is not yet finished, which
* may or may not culminate in a value.
* Taken from: https://github.com/mike-north/types/blob/master/src/async.ts
*/
export class Deferred<T = any> {
public static promiseConstructor: PromiseConstructor = Promise
public readonly promise!: PromiseLike<T>
public readonly resolve!: (value?: T | PromiseLike<T>) => void
public readonly reject!: (reason?: any) => any
public constructor() {
// eslint-disable-next-line @typescript-eslint/no-extra-semi
;(this as any).promise = new Deferred.promiseConstructor((res, rej) => {
// eslint-disable-next-line @typescript-eslint/no-extra-semi
;(this as any).resolve = res
// eslint-disable-next-line @typescript-eslint/no-extra-semi
;(this as any).reject = rej
})
}
}
// Taken from: https://stackoverflow.com/questions/38552003/how-to-decode-jwt-token-in-javascript-without-using-a-library
export function decodeJWTPayload(token: string) {
// Regex checks for base64url format
const base64UrlRegex = /^([a-z0-9_-]{4})*($|[a-z0-9_-]{3}=?$|[a-z0-9_-]{2}(==)?$)$/i
const parts = token.split('.')
if (parts.length !== 3) {
throw new Error('JWT is not valid: not a JWT structure')
}
if (!base64UrlRegex.test(parts[1])) {
throw new Error('JWT is not valid: payload is not in base64url format')
}
const base64Url = parts[1]
return JSON.parse(decodeBase64URL(base64Url))
}
/**
* Creates a promise that resolves to null after some time.
*/
export async function sleep(time: number): Promise<null> {
return await new Promise((accept) => {
setTimeout(() => accept(null), time)
})
}
/**
* Converts the provided async function into a retryable function. Each result
* or thrown error is sent to the isRetryable function which should return true
* if the function should run again.
*/
export function retryable<T>(
fn: (attempt: number) => Promise<T>,
isRetryable: (attempt: number, error: any | null, result?: T) => boolean
): Promise<T> {
const promise = new Promise<T>((accept, reject) => {
// eslint-disable-next-line @typescript-eslint/no-extra-semi
;(async () => {
for (let attempt = 0; attempt < Infinity; attempt++) {
try {
const result = await fn(attempt)
if (!isRetryable(attempt, null, result)) {
accept(result)
return
}
} catch (e: any) {
if (!isRetryable(attempt, e)) {
reject(e)
return
}
}
}
})()
})
return promise
}
function dec2hex(dec: number) {
return ('0' + dec.toString(16)).substr(-2)
}
// Functions below taken from: https://stackoverflow.com/questions/63309409/creating-a-code-verifier-and-challenge-for-pkce-auth-on-spotify-api-in-reactjs
export function generatePKCEVerifier() {
const verifierLength = 56
const array = new Uint32Array(verifierLength)
if (typeof crypto === 'undefined') {
const charSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~'
const charSetLen = charSet.length
let verifier = ''
for (let i = 0; i < verifierLength; i++) {
verifier += charSet.charAt(Math.floor(Math.random() * charSetLen))
}
return verifier
}
crypto.getRandomValues(array)
return Array.from(array, dec2hex).join('')
}
async function sha256(randomString: string) {
const encoder = new TextEncoder()
const encodedData = encoder.encode(randomString)
const hash = await crypto.subtle.digest('SHA-256', encodedData)
const bytes = new Uint8Array(hash)
return Array.from(bytes)
.map((c) => String.fromCharCode(c))
.join('')
}
function base64urlencode(str: string) {
return btoa(str).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '')
}
export async function generatePKCEChallenge(verifier: string) {
const hasCryptoSupport =
typeof crypto !== 'undefined' &&
typeof crypto.subtle !== 'undefined' &&
typeof TextEncoder !== 'undefined'
if (!hasCryptoSupport) {
console.warn(
'WebCrypto API is not supported. Code challenge method will default to use plain instead of sha256.'
)
return verifier
}
const hashed = await sha256(verifier)
return base64urlencode(hashed)
}
export async function getCodeChallengeAndMethod(
storage: SupportedStorage,
storageKey: string,
isPasswordRecovery = false
) {
const codeVerifier = generatePKCEVerifier()
let storedCodeVerifier = codeVerifier
if (isPasswordRecovery) {
storedCodeVerifier += '/PASSWORD_RECOVERY'
}
await setItemAsync(storage, `${storageKey}-code-verifier`, storedCodeVerifier)
const codeChallenge = await generatePKCEChallenge(codeVerifier)
const codeChallengeMethod = codeVerifier === codeChallenge ? 'plain' : 's256'
return [codeChallenge, codeChallengeMethod]
}
/** Parses the API version which is 2YYY-MM-DD. */
const API_VERSION_REGEX = /^2[0-9]{3}-(0[1-9]|1[0-2])-(0[1-9]|1[0-9]|2[0-9]|3[0-1])$/i
export function parseResponseAPIVersion(response: Response) {
const apiVersion = response.headers.get(API_VERSION_HEADER_NAME)
if (!apiVersion) {
return null
}
if (!apiVersion.match(API_VERSION_REGEX)) {
return null
}
try {
const date = new Date(`${apiVersion}T00:00:00.0Z`)
return date
} catch (e: any) {
return null
}
}
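
`decodeJWTPayload` validates only the token structure and the middle (payload) segment, so a fabricated token is enough to exercise it. A sketch (the header `'e30'` is base64url for `'{}'`; the signature segment is never inspected):
```js
const payload = btoa(JSON.stringify({ sub: '123' })) // 'eyJzdWIiOiIxMjMifQ=='
const fakeToken = ['e30', payload, 'signature'].join('.')
console.log(decodeJWTPayload(fakeToken)) // => { sub: '123' }
```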

49
node_modules/@supabase/auth-js/src/lib/local-storage.ts generated vendored Normal file

@@ -0,0 +1,49 @@
import { supportsLocalStorage } from './helpers'
import { SupportedStorage } from './types'
/**
* Provides safe access to the globalThis.localStorage property.
*/
export const localStorageAdapter: SupportedStorage = {
getItem: (key) => {
if (!supportsLocalStorage()) {
return null
}
return globalThis.localStorage.getItem(key)
},
setItem: (key, value) => {
if (!supportsLocalStorage()) {
return
}
globalThis.localStorage.setItem(key, value)
},
removeItem: (key) => {
if (!supportsLocalStorage()) {
return
}
globalThis.localStorage.removeItem(key)
},
}
/**
* Returns a localStorage-like object that stores the key-value pairs in
* memory.
*/
export function memoryLocalStorageAdapter(store: { [key: string]: string } = {}): SupportedStorage {
return {
getItem: (key) => {
return store[key] || null
},
setItem: (key, value) => {
store[key] = value
},
removeItem: (key) => {
delete store[key]
},
}
}
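
A short sketch of the in-memory adapter, handy for tests or non-browser environments (passing it as a client's `storage` option is an assumption based on GoTrueClient accepting a `SupportedStorage`):
```js
const store = {}
const storage = memoryLocalStorageAdapter(store)
storage.setItem('session-key', '{"access_token":"..."}')
console.log(storage.getItem('session-key')) // the stored JSON string
storage.removeItem('session-key')
console.log(storage.getItem('session-key')) // null
```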

225
node_modules/@supabase/auth-js/src/lib/locks.ts generated vendored Normal file

@@ -0,0 +1,225 @@
import { supportsLocalStorage } from './helpers'
/**
* @experimental
*/
export const internals = {
/**
* @experimental
*/
debug: !!(
globalThis &&
supportsLocalStorage() &&
globalThis.localStorage &&
globalThis.localStorage.getItem('supabase.gotrue-js.locks.debug') === 'true'
),
}
/**
* An error thrown when a lock cannot be acquired after some amount of time.
*
* Use the {@link #isAcquireTimeout} property instead of checking with `instanceof`.
*/
export abstract class LockAcquireTimeoutError extends Error {
public readonly isAcquireTimeout = true
constructor(message: string) {
super(message)
}
}
export class NavigatorLockAcquireTimeoutError extends LockAcquireTimeoutError {}
export class ProcessLockAcquireTimeoutError extends LockAcquireTimeoutError {}
/**
* Implements a global exclusive lock using the Navigator LockManager API. It
* is available on all browsers released after 2022-03-15 with Safari being the
* last one to release support. If the API is not available, this function will
* throw. Make sure you check availability before configuring {@link
* GoTrueClient}.
*
* You can turn on debugging by setting the `supabase.gotrue-js.locks.debug`
* local storage item to `true`.
*
* Internals:
*
* Since the LockManager API does not preserve stack traces for the async
* function passed in the `request` method, a trick is used where acquiring the
* lock releases a previously started promise to run the operation in the `fn`
* function. The lock waits for that promise to finish (with or without error),
* while the function will finally wait for the result anyway.
*
* @param name Name of the lock to be acquired.
* @param acquireTimeout If negative, no timeout. If 0 an error is thrown if
* the lock can't be acquired without waiting. If positive, the lock acquire
* will time out after so many milliseconds. An error is
* a timeout if it has `isAcquireTimeout` set to true.
* @param fn The operation to run once the lock is acquired.
*/
export async function navigatorLock<R>(
name: string,
acquireTimeout: number,
fn: () => Promise<R>
): Promise<R> {
if (internals.debug) {
console.log('@supabase/gotrue-js: navigatorLock: acquire lock', name, acquireTimeout)
}
const abortController = new globalThis.AbortController()
if (acquireTimeout > 0) {
setTimeout(() => {
abortController.abort()
if (internals.debug) {
console.log('@supabase/gotrue-js: navigatorLock acquire timed out', name)
}
}, acquireTimeout)
}
// MDN article: https://developer.mozilla.org/en-US/docs/Web/API/LockManager/request
// Wrapping navigator.locks.request() with a plain Promise is done as some
// libraries like zone.js patch the Promise object to track the execution
// context. However, it appears that most browsers use an internal promise
// implementation when using the navigator.locks.request() API causing them
// to lose context and emit confusing log messages or break certain features.
// This wrapping is believed to help zone.js track the execution context
// better.
return await Promise.resolve().then(() =>
globalThis.navigator.locks.request(
name,
acquireTimeout === 0
? {
mode: 'exclusive',
ifAvailable: true,
}
: {
mode: 'exclusive',
signal: abortController.signal,
},
async (lock) => {
if (lock) {
if (internals.debug) {
console.log('@supabase/gotrue-js: navigatorLock: acquired', name, lock.name)
}
try {
return await fn()
} finally {
if (internals.debug) {
console.log('@supabase/gotrue-js: navigatorLock: released', name, lock.name)
}
}
} else {
if (acquireTimeout === 0) {
if (internals.debug) {
console.log('@supabase/gotrue-js: navigatorLock: not immediately available', name)
}
throw new NavigatorLockAcquireTimeoutError(
`Acquiring an exclusive Navigator LockManager lock "${name}" immediately failed`
)
} else {
if (internals.debug) {
try {
const result = await globalThis.navigator.locks.query()
console.log(
'@supabase/gotrue-js: Navigator LockManager state',
JSON.stringify(result, null, ' ')
)
} catch (e: any) {
console.warn(
'@supabase/gotrue-js: Error when querying Navigator LockManager state',
e
)
}
}
// Browser is not following the Navigator LockManager spec, it
// returned a null lock when we didn't use ifAvailable. So we can
// pretend the lock is acquired in the name of backward compatibility
// and user experience and just run the function.
console.warn(
'@supabase/gotrue-js: Navigator LockManager returned a null lock when using #request without ifAvailable set to true, it appears this browser is not following the LockManager spec https://developer.mozilla.org/en-US/docs/Web/API/LockManager/request'
)
return await fn()
}
}
}
)
)
}
const PROCESS_LOCKS: { [name: string]: Promise<any> } = {}
/**
* Implements a global exclusive lock that works only in the current process.
* Useful for environments like React Native or other non-browser
* single-process (i.e. no concept of "tabs") environments.
*
* Use {@link #navigatorLock} in browser environments.
*
* @param name Name of the lock to be acquired.
* @param acquireTimeout If negative, no timeout. If 0 an error is thrown if
* the lock can't be acquired without waiting. If positive, the lock acquire
* will time out after so many milliseconds. An error is
* a timeout if it has `isAcquireTimeout` set to true.
* @param fn The operation to run once the lock is acquired.
*/
export async function processLock<R>(
name: string,
acquireTimeout: number,
fn: () => Promise<R>
): Promise<R> {
const previousOperation = PROCESS_LOCKS[name] ?? Promise.resolve()
const currentOperation = Promise.race(
[
previousOperation.catch(() => {
// ignore error of previous operation that we're waiting to finish
return null
}),
acquireTimeout >= 0
? new Promise((_, reject) => {
setTimeout(() => {
reject(
new ProcessLockAcquireTimeoutError(
`Acquiring process lock with name "${name}" timed out`
)
)
}, acquireTimeout)
})
: null,
].filter((x) => x)
)
.catch((e: any) => {
if (e && e.isAcquireTimeout) {
throw e
}
return null
})
.then(async () => {
// previous operations finished and we didn't get a race on the acquire
// timeout, so the current operation can finally start
return await fn()
})
PROCESS_LOCKS[name] = currentOperation.catch(async (e: any) => {
if (e && e.isAcquireTimeout) {
// if the current operation timed out, it doesn't mean that the previous
// operation finished, so we need to continue waiting for it to finish
await previousOperation
return null
}
throw e
})
// finally wait for the current operation to finish successfully, with an
// error or with an acquire timeout error
return await currentOperation
}
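
A sketch of how `processLock` serializes work under one lock name (a negative `acquireTimeout` means wait indefinitely):
```js
const first = processLock('session', -1, async () => {
  await new Promise((resolve) => setTimeout(resolve, 100))
  return 'first'
})
// Starts immediately, but its callback only runs after `first` settles.
const second = processLock('session', -1, async () => 'second')

console.log(await first, await second) // 'first' 'second'
```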

23
node_modules/@supabase/auth-js/src/lib/polyfills.ts generated vendored Normal file

@@ -0,0 +1,23 @@
/**
* https://mathiasbynens.be/notes/globalthis
*/
export function polyfillGlobalThis() {
if (typeof globalThis === 'object') return
try {
Object.defineProperty(Object.prototype, '__magic__', {
get: function () {
return this
},
configurable: true,
})
// @ts-expect-error 'Allow access to magic'
__magic__.globalThis = __magic__
// @ts-expect-error 'Allow access to magic'
delete Object.prototype.__magic__
} catch (e) {
if (typeof self !== 'undefined') {
// @ts-expect-error 'Allow access to globals'
self.globalThis = self
}
}
}

1201
node_modules/@supabase/auth-js/src/lib/types.ts generated vendored Normal file

File diff suppressed because it is too large

1
node_modules/@supabase/auth-js/src/lib/version.ts generated vendored Normal file

@@ -0,0 +1 @@
export const version = '2.68.0'

21
node_modules/@supabase/functions-js/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

22
node_modules/@supabase/functions-js/README.md generated vendored Normal file

@@ -0,0 +1,22 @@
# `functions-js`
[![Coverage Status](https://coveralls.io/repos/github/supabase/functions-js/badge.svg?branch=main)](https://coveralls.io/github/supabase/functions-js?branch=main)
JS Client library to interact with Supabase Functions.
## Docs
<https://supabase.com/docs/reference/javascript/functions-invoke>
## testing
To run tests you will need Node 20+.
You are going to need docker daemon running to execute tests.
To start test run use the following command:
```sh
npm i
npm run test
```

65
node_modules/@supabase/functions-js/package.json generated vendored Normal file

@@ -0,0 +1,65 @@
{
"name": "@supabase/functions-js",
"version": "2.4.4",
"description": "JS Client library to interact with Supabase Functions.",
"main": "dist/main/index.js",
"module": "dist/module/index.js",
"types": "dist/module/index.d.ts",
"sideEffects": false,
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\"",
"build": "run-s clean format build:*",
"build:main": "tsc -p tsconfig.json",
"build:module": "tsc -p tsconfig.module.json",
"docs": "typedoc src/index.ts --out docs/v2",
"docs:json": "typedoc --json docs/v2/spec.json --excludeExternals src/index.ts",
"test": "jest",
"test:coverage": "jest --coverage"
},
"repository": {
"type": "git",
"url": "git+https://github.com/supabase/functions-js.git"
},
"keywords": [
"functions",
"supabase"
],
"author": "Supabase",
"files": [
"dist",
"src"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/supabase/functions-js/issues"
},
"homepage": "https://github.com/supabase/functions-js#readme",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
},
"devDependencies": {
"@sebbo2002/semantic-release-jsr": "^1.0.0",
"@types/jest": "^28.1.0",
"@types/jsonwebtoken": "^8.5.8",
"@types/node": "^18.7.0",
"genversion": "^3.0.2",
"jest": "^28.1.0",
"jsonwebtoken": "^9.0.0",
"nanoid": "^3.3.1",
"npm-run-all": "^4.1.5",
"openai": "^4.52.5",
"prettier": "^2.6.0",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"testcontainers": "^8.5.1",
"ts-jest": "^28.0.0",
"ts-node": "^10.9.0",
"ts-test-decorators": "^0.0.6",
"typedoc": "^0.22.13",
"typescript": "^4.6.2"
},
"publishConfig": {
"access": "public"
}
}

132
node_modules/@supabase/functions-js/src/FunctionsClient.ts generated vendored Normal file

@@ -0,0 +1,132 @@
import { resolveFetch } from './helper'
import {
Fetch,
FunctionsFetchError,
FunctionsHttpError,
FunctionsRelayError,
FunctionsResponse,
FunctionInvokeOptions,
FunctionRegion,
} from './types'
export class FunctionsClient {
protected url: string
protected headers: Record<string, string>
protected region: FunctionRegion
protected fetch: Fetch
constructor(
url: string,
{
headers = {},
customFetch,
region = FunctionRegion.Any,
}: {
headers?: Record<string, string>
customFetch?: Fetch
region?: FunctionRegion
} = {}
) {
this.url = url
this.headers = headers
this.region = region
this.fetch = resolveFetch(customFetch)
}
/**
* Updates the authorization header
* @param token - the new jwt token sent in the authorization header
*/
setAuth(token: string) {
this.headers.Authorization = `Bearer ${token}`
}
/**
* Invokes a function
* @param functionName - The name of the Function to invoke.
* @param options - Options for invoking the Function.
*/
async invoke<T = any>(
functionName: string,
options: FunctionInvokeOptions = {}
): Promise<FunctionsResponse<T>> {
try {
const { headers, method, body: functionArgs } = options
let _headers: Record<string, string> = {}
let { region } = options
if (!region) {
region = this.region
}
if (region && region !== 'any') {
_headers['x-region'] = region
}
let body: any
if (
functionArgs &&
((headers && !Object.prototype.hasOwnProperty.call(headers, 'Content-Type')) || !headers)
) {
if (
(typeof Blob !== 'undefined' && functionArgs instanceof Blob) ||
functionArgs instanceof ArrayBuffer
) {
// will work for File as File inherits Blob
// also works for ArrayBuffer as it is the same underlying structure as a Blob
_headers['Content-Type'] = 'application/octet-stream'
body = functionArgs
} else if (typeof functionArgs === 'string') {
// plain string
_headers['Content-Type'] = 'text/plain'
body = functionArgs
} else if (typeof FormData !== 'undefined' && functionArgs instanceof FormData) {
// don't set content-type headers
// Request will automatically add the right boundary value
body = functionArgs
} else {
// default, assume this is JSON
_headers['Content-Type'] = 'application/json'
body = JSON.stringify(functionArgs)
}
}
const response = await this.fetch(`${this.url}/${functionName}`, {
method: method || 'POST',
// headers priority is (high to low):
// 1. invoke-level headers
// 2. client-level headers
// 3. default Content-Type header
headers: { ..._headers, ...this.headers, ...headers },
body,
}).catch((fetchError) => {
throw new FunctionsFetchError(fetchError)
})
const isRelayError = response.headers.get('x-relay-error')
if (isRelayError && isRelayError === 'true') {
throw new FunctionsRelayError(response)
}
if (!response.ok) {
throw new FunctionsHttpError(response)
}
let responseType = (response.headers.get('Content-Type') ?? 'text/plain').split(';')[0].trim()
let data: any
if (responseType === 'application/json') {
data = await response.json()
} else if (responseType === 'application/octet-stream') {
data = await response.blob()
} else if (responseType === 'text/event-stream') {
data = response
} else if (responseType === 'multipart/form-data') {
data = await response.formData()
} else {
// default to text
data = await response.text()
}
return { data, error: null }
} catch (error) {
return { data: null, error }
}
}
}
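A minimal usage sketch of the client above (the project URL, function name, and `anonKey` credential are placeholders), relying on the body-type inference documented in `invoke`:
```ts
import { FunctionsClient, FunctionRegion } from '@supabase/functions-js'

const anonKey = process.env.SUPABASE_ANON_KEY ?? '' // placeholder credential
const functions = new FunctionsClient('https://<project-ref>.supabase.co/functions/v1', {
  headers: { Authorization: `Bearer ${anonKey}` },
  region: FunctionRegion.UsEast1, // sent as the x-region header
})

// A plain-object body is JSON-stringified with Content-Type: application/json;
// a Blob/ArrayBuffer would go out as application/octet-stream, a string as text/plain.
const { data, error } = await functions.invoke<{ message: string }>('hello-world', {
  body: { name: 'Functions' },
})
if (error) console.error(error)
else console.log(data?.message)
```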

View File

@ -0,0 +1,62 @@
declare namespace Supabase {
export interface ModelOptions {
/**
* Pool embeddings by taking their mean. Applies only for `gte-small` model
*/
mean_pool?: boolean
/**
* Normalize the embeddings result. Applies only for `gte-small` model
*/
normalize?: boolean
/**
* Stream response from model. Applies only for LLMs like `mistral` (default: false)
*/
stream?: boolean
/**
* Automatically abort the request to the model after specified time (in seconds). Applies only for LLMs like `mistral` (default: 60)
*/
timeout?: number
/**
* Mode for the inference API host. (default: 'ollama')
*/
mode?: 'ollama' | 'openaicompatible'
signal?: AbortSignal
}
export class Session {
/**
* Create a new model session using given model
*/
constructor(model: string, sessionOptions?: unknown)
/**
* Execute the given prompt in model session
*/
run(
prompt:
| string
| Omit<import('openai').OpenAI.Chat.ChatCompletionCreateParams, 'model' | 'stream'>,
modelOptions?: ModelOptions
): unknown
}
/**
* Provides AI related APIs
*/
export interface Ai {
readonly Session: typeof Session
}
/**
* Provides AI related APIs
*/
export const ai: Ai
}
declare namespace EdgeRuntime {
export function waitUntil<T>(promise: Promise<T>): Promise<T>
}
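These are ambient declarations for Supabase's Deno-based Edge Runtime. A minimal sketch of how they are typically used inside an Edge Function (the model name, request shape, and `Deno.serve` availability are assumptions about that runtime):
```ts
// Runs inside a Supabase Edge Function, where the runtime provides the
// global `Supabase.ai` namespace declared above.
const session = new Supabase.ai.Session('gte-small')

Deno.serve(async (req: Request) => {
  const { input } = await req.json()
  // mean_pool and normalize apply only to the `gte-small` embedding model
  const embedding = await session.run(input, { mean_pool: true, normalize: true })
  return new Response(JSON.stringify(embedding), {
    headers: { 'Content-Type': 'application/json' },
  })
})
```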

14
node_modules/@supabase/functions-js/src/helper.ts generated vendored Normal file
View File

@ -0,0 +1,14 @@
import { Fetch } from './types'
export const resolveFetch = (customFetch?: Fetch): Fetch => {
let _fetch: Fetch
if (customFetch) {
_fetch = customFetch
} else if (typeof fetch === 'undefined') {
_fetch = (...args) =>
import('@supabase/node-fetch' as any).then(({ default: fetch }) => fetch(...args))
} else {
_fetch = fetch
}
return (...args) => _fetch(...args)
}
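`resolveFetch` prefers a caller-supplied implementation, then the global `fetch`, and only lazily imports `@supabase/node-fetch` when neither exists. A sketch of injecting a custom `fetch` (the logging wrapper is illustrative):
```ts
import { FunctionsClient } from '@supabase/functions-js'

// Wrap the platform fetch to log outgoing requests
const loggingFetch: typeof fetch = (...args) => {
  console.log('functions request:', args[0])
  return fetch(...args)
}

const functions = new FunctionsClient('https://<project-ref>.supabase.co/functions/v1', {
  customFetch: loggingFetch,
})
```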

10
node_modules/@supabase/functions-js/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,10 @@
export { FunctionsClient } from './FunctionsClient'
export {
type FunctionInvokeOptions,
FunctionsError,
FunctionsFetchError,
FunctionsHttpError,
FunctionsRelayError,
FunctionRegion,
type FunctionsResponse,
} from './types'

86
node_modules/@supabase/functions-js/src/types.ts generated vendored Normal file
View File

@ -0,0 +1,86 @@
export type Fetch = typeof fetch
/**
* Response format
*
*/
export interface FunctionsResponseSuccess<T> {
data: T
error: null
}
export interface FunctionsResponseFailure {
data: null
error: any
}
export type FunctionsResponse<T> = FunctionsResponseSuccess<T> | FunctionsResponseFailure
export class FunctionsError extends Error {
context: any
constructor(message: string, name = 'FunctionsError', context?: any) {
super(message)
this.name = name
this.context = context
}
}
export class FunctionsFetchError extends FunctionsError {
constructor(context: any) {
super('Failed to send a request to the Edge Function', 'FunctionsFetchError', context)
}
}
export class FunctionsRelayError extends FunctionsError {
constructor(context: any) {
super('Relay Error invoking the Edge Function', 'FunctionsRelayError', context)
}
}
export class FunctionsHttpError extends FunctionsError {
constructor(context: any) {
super('Edge Function returned a non-2xx status code', 'FunctionsHttpError', context)
}
}
// Define the enum for the 'region' property
export enum FunctionRegion {
Any = 'any',
ApNortheast1 = 'ap-northeast-1',
ApNortheast2 = 'ap-northeast-2',
ApSouth1 = 'ap-south-1',
ApSoutheast1 = 'ap-southeast-1',
ApSoutheast2 = 'ap-southeast-2',
CaCentral1 = 'ca-central-1',
EuCentral1 = 'eu-central-1',
EuWest1 = 'eu-west-1',
EuWest2 = 'eu-west-2',
EuWest3 = 'eu-west-3',
SaEast1 = 'sa-east-1',
UsEast1 = 'us-east-1',
UsWest1 = 'us-west-1',
UsWest2 = 'us-west-2',
}
export type FunctionInvokeOptions = {
/**
* Object representing the headers to send with the request.
* */
headers?: { [key: string]: string }
/**
* The HTTP verb of the request
*/
method?: 'POST' | 'GET' | 'PUT' | 'PATCH' | 'DELETE'
/**
* The Region to invoke the function in.
*/
region?: FunctionRegion
/**
* The body of the request.
*/
body?:
| File
| Blob
| ArrayBuffer
| FormData
| ReadableStream<Uint8Array>
| Record<string, any>
| string
}
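Since `invoke` returns errors instead of throwing, callers can branch on the subclass; a sketch (project URL and function name are placeholders):
```ts
import {
  FunctionsClient,
  FunctionsFetchError,
  FunctionsHttpError,
  FunctionsRelayError,
} from '@supabase/functions-js'

const functions = new FunctionsClient('https://<project-ref>.supabase.co/functions/v1')

const { error } = await functions.invoke('hello-world')
if (error instanceof FunctionsHttpError) {
  // the function ran but returned a non-2xx status; context is the Response
  console.error('Function error:', await error.context.text())
} else if (error instanceof FunctionsRelayError) {
  console.error('Relay error:', error.message)
} else if (error instanceof FunctionsFetchError) {
  console.error('Network error:', error.message)
}
```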

1
node_modules/@supabase/functions-js/src/version.ts generated vendored Normal file
View File

@ -0,0 +1 @@
export const version = '2.4.4'

22
node_modules/@supabase/node-fetch/LICENSE.md generated vendored Normal file
View File

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

633
node_modules/@supabase/node-fetch/README.md generated vendored Normal file
View File

@ -0,0 +1,633 @@
node-fetch
==========
[![npm version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![coverage status][codecov-image]][codecov-url]
[![install size][install-size-image]][install-size-url]
[![Discord][discord-image]][discord-url]
A light-weight module that brings `window.fetch` to Node.js
(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
[![Backers][opencollective-image]][opencollective-url]
<!-- TOC -->
- [Motivation](#motivation)
- [Features](#features)
- [Difference from client-side fetch](#difference-from-client-side-fetch)
- [Installation](#installation)
- [Loading and configuring the module](#loading-and-configuring-the-module)
- [Common Usage](#common-usage)
- [Plain text or HTML](#plain-text-or-html)
- [JSON](#json)
- [Simple Post](#simple-post)
- [Post with JSON](#post-with-json)
- [Post with form parameters](#post-with-form-parameters)
- [Handling exceptions](#handling-exceptions)
- [Handling client and server errors](#handling-client-and-server-errors)
- [Advanced Usage](#advanced-usage)
- [Streams](#streams)
- [Buffer](#buffer)
- [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data)
- [Extract Set-Cookie Header](#extract-set-cookie-header)
- [Post data using a file stream](#post-data-using-a-file-stream)
- [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart)
- [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal)
- [API](#api)
- [fetch(url[, options])](#fetchurl-options)
- [Options](#options)
- [Class: Request](#class-request)
- [Class: Response](#class-response)
- [Class: Headers](#class-headers)
- [Interface: Body](#interface-body)
- [Class: FetchError](#class-fetcherror)
- [License](#license)
- [Acknowledgement](#acknowledgement)
<!-- /TOC -->
## Motivation
Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime.
See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
## Features
- Stay consistent with `window.fetch` API.
- Make conscious trade-offs when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, and document known differences.
- Use native promise but allow substituting it with [insert your favorite promise library].
- Use native Node streams for body on both request and response.
- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
## Difference from client-side fetch
- See [Known Differences](LIMITS.md) for details.
- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
- Pull requests are welcomed too!
## Installation
Current stable release (`2.x`)
```sh
$ npm install node-fetch
```
## Loading and configuring the module
We suggest you load the module via `require` until the stabilization of ES modules in node:
```js
const fetch = require('node-fetch');
```
If you are using a Promise library other than native, set it through `fetch.Promise`:
```js
const Bluebird = require('bluebird');
fetch.Promise = Bluebird;
```
## Common Usage
NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
#### Plain text or HTML
```js
fetch('https://github.com/')
.then(res => res.text())
.then(body => console.log(body));
```
#### JSON
```js
fetch('https://api.github.com/users/github')
.then(res => res.json())
.then(json => console.log(json));
```
#### Simple Post
```js
fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
.then(res => res.json()) // expecting a json response
.then(json => console.log(json));
```
#### Post with JSON
```js
const body = { a: 1 };
fetch('https://httpbin.org/post', {
method: 'post',
body: JSON.stringify(body),
headers: { 'Content-Type': 'application/json' },
})
.then(res => res.json())
.then(json => console.log(json));
```
#### Post with form parameters
`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such:
```js
const { URLSearchParams } = require('url');
const params = new URLSearchParams();
params.append('a', 1);
fetch('https://httpbin.org/post', { method: 'POST', body: params })
.then(res => res.json())
.then(json => console.log(json));
```
#### Handling exceptions
NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information.
Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details.
```js
fetch('https://domain.invalid/')
.catch(err => console.error(err));
```
#### Handling client and server errors
It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses:
```js
function checkStatus(res) {
if (res.ok) { // res.status >= 200 && res.status < 300
return res;
} else {
throw new MyCustomError(res.statusText);
}
}
fetch('https://httpbin.org/status/400')
.then(checkStatus)
.then(res => console.log('will not get here...'))
```
## Advanced Usage
#### Streams
The "Node.js way" is to use streams when possible:
```js
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
.then(res => {
const dest = fs.createWriteStream('./octocat.png');
res.body.pipe(dest);
});
```
In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch
errors -- the longer a response runs, the more likely it is to encounter an error.
```js
const fetch = require('node-fetch');
const response = await fetch('https://httpbin.org/stream/3');
try {
for await (const chunk of response.body) {
console.dir(JSON.parse(chunk.toString()));
}
} catch (err) {
console.error(err.stack);
}
```
In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams
did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors
directly from the stream and wait for the response to fully close.
```js
const fetch = require('node-fetch');
const read = async body => {
let error;
body.on('error', err => {
error = err;
});
for await (const chunk of body) {
console.dir(JSON.parse(chunk.toString()));
}
return new Promise((resolve, reject) => {
body.on('close', () => {
error ? reject(error) : resolve();
});
});
};
try {
const response = await fetch('https://httpbin.org/stream/3');
await read(response.body);
} catch (err) {
console.error(err.stack);
}
```
#### Buffer
If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API)
```js
const fileType = require('file-type');
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
.then(res => res.buffer())
.then(buffer => fileType(buffer))
.then(type => { /* ... */ });
```
#### Accessing Headers and other Meta data
```js
fetch('https://github.com/')
.then(res => {
console.log(res.ok);
console.log(res.status);
console.log(res.statusText);
console.log(res.headers.raw());
console.log(res.headers.get('content-type'));
});
```
#### Extract Set-Cookie Header
Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API.
```js
fetch(url).then(res => {
// returns an array of values, instead of a string of comma-separated values
console.log(res.headers.raw()['set-cookie']);
});
```
#### Post data using a file stream
```js
const { createReadStream } = require('fs');
const stream = createReadStream('input.txt');
fetch('https://httpbin.org/post', { method: 'POST', body: stream })
.then(res => res.json())
.then(json => console.log(json));
```
#### Post with form-data (detect multipart)
```js
const FormData = require('form-data');
const form = new FormData();
form.append('a', 1);
fetch('https://httpbin.org/post', { method: 'POST', body: form })
.then(res => res.json())
.then(json => console.log(json));
// OR, using custom headers
// NOTE: getHeaders() is non-standard API
const form = new FormData();
form.append('a', 1);
const options = {
method: 'POST',
body: form,
headers: form.getHeaders()
}
fetch('https://httpbin.org/post', options)
.then(res => res.json())
.then(json => console.log(json));
```
#### Request cancellation with AbortSignal
> NOTE: You may cancel streamed requests only on Node >= v8.0.0
You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
An example of timing out a request after 150ms can be written as follows:
```js
import AbortController from 'abort-controller';
const controller = new AbortController();
const timeout = setTimeout(
() => { controller.abort(); },
150,
);
fetch(url, { signal: controller.signal })
.then(res => res.json())
.then(
data => {
useData(data)
},
err => {
if (err.name === 'AbortError') {
// request was aborted
}
},
)
.finally(() => {
clearTimeout(timeout);
});
```
See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
## API
### fetch(url[, options])
- `url` A string representing the URL for fetching
- `options` [Options](#fetch-options) for the HTTP(S) request
- Returns: <code>Promise&lt;[Response](#class-response)&gt;</code>
Perform an HTTP(S) fetch.
`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`.
<a id="fetch-options"></a>
### Options
The default values are shown after each option key.
```js
{
// These properties are part of the Fetch Standard
method: 'GET',
    headers: {},        // request headers. format is identical to that accepted by the Headers constructor (see below)
body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
signal: null, // pass an instance of AbortSignal to optionally abort requests
// The following properties are node-fetch extensions
follow: 20, // maximum redirect count. 0 to not follow redirect
timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
compress: true, // support gzip/deflate content encoding. false to disable
size: 0, // maximum response body size in bytes. 0 to disable
agent: null // http(s).Agent instance or function that returns an instance (see below)
}
```
##### Default Headers
If no values are set, the following request headers will be sent automatically:
Header | Value
------------------- | --------------------------------------------------------
`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
`Accept` | `*/*`
`Connection` | `close` _(when no `options.agent` is present)_
`Content-Length` | _(automatically calculated, if possible)_
`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
Note: when `body` is a `Stream`, `Content-Length` is not set automatically.
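Any of these defaults can be overridden per request; a small sketch (the endpoint is illustrative):
```ts
import fetch from 'node-fetch'

const res = await fetch('https://example.com/', {
  compress: false, // omit the Accept-Encoding: gzip,deflate header
  headers: { 'User-Agent': 'my-app/1.0' }, // replace the default UA
})
console.log(res.status)
```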
##### Custom Agent
The `agent` option allows you to specify networking-related options which are outside the scope of Fetch, including but not limited to the following:
- Support self-signed certificate
- Use only IPv4 or IPv6
- Custom DNS Lookup
See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information.
In addition, the `agent` option accepts a function that returns an `http(s).Agent` instance given the current [URL](https://nodejs.org/api/url.html); this is useful during a redirection chain across the HTTP and HTTPS protocols.
```js
const httpAgent = new http.Agent({
keepAlive: true
});
const httpsAgent = new https.Agent({
keepAlive: true
});
const options = {
agent: function (_parsedURL) {
if (_parsedURL.protocol == 'http:') {
return httpAgent;
} else {
return httpsAgent;
}
}
}
```
<a id="class-request"></a>
### Class: Request
An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
Due to the nature of Node.js, the following properties are not implemented at this moment:
- `type`
- `destination`
- `referrer`
- `referrerPolicy`
- `mode`
- `credentials`
- `cache`
- `integrity`
- `keepalive`
The following node-fetch extension properties are provided:
- `follow`
- `compress`
- `counter`
- `agent`
See [options](#fetch-options) for exact meaning of these extensions.
#### new Request(input[, options])
<small>*(spec-compliant)*</small>
- `input` A string representing a URL, or another `Request` (which will be cloned)
- `options` [Options](#fetch-options) for the HTTP(S) request
Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object.
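A sketch of constructing a `Request` and passing it to `fetch`, e.g. when the request is built in one place and dispatched in another (the URL is illustrative):
```ts
import fetch, { Request } from 'node-fetch'

const request = new Request('https://httpbin.org/post', {
  method: 'POST',
  body: JSON.stringify({ a: 1 }),
  headers: { 'Content-Type': 'application/json' },
})

const res = await fetch(request)
console.log(res.status)
```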
<a id="class-response"></a>
### Class: Response
An HTTP(S) response. This class implements the [Body](#iface-body) interface.
The following properties are not implemented in node-fetch at this moment:
- `Response.error()`
- `Response.redirect()`
- `type`
- `trailer`
#### new Response([body[, options]])
<small>*(spec-compliant)*</small>
- `body` A `String` or [`Readable` stream][node-readable]
- `options` A [`ResponseInit`][response-init] options dictionary
Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
#### response.ok
<small>*(spec-compliant)*</small>
Convenience property representing whether the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
#### response.redirected
<small>*(spec-compliant)*</small>
Convenience property representing whether the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
<a id="class-headers"></a>
### Class: Headers
This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
#### new Headers([init])
<small>*(spec-compliant)*</small>
- `init` Optional argument to pre-fill the `Headers` object
Constructs a new `Headers` object. `init` can be either `null`, a `Headers` object, a key-value map object, or any iterable object.
```js
// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
const meta = {
'Content-Type': 'text/xml',
'Breaking-Bad': '<3'
};
const headers = new Headers(meta);
// The above is equivalent to
const meta = [
[ 'Content-Type', 'text/xml' ],
[ 'Breaking-Bad', '<3' ]
];
const headers = new Headers(meta);
// You can in fact use any iterable objects, like a Map or even another Headers
const meta = new Map();
meta.set('Content-Type', 'text/xml');
meta.set('Breaking-Bad', '<3');
const headers = new Headers(meta);
const copyOfHeaders = new Headers(headers);
```
<a id="iface-body"></a>
### Interface: Body
`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
The following methods are not yet implemented in node-fetch at this moment:
- `formData()`
#### body.body
<small>*(deviation from spec)*</small>
* Node.js [`Readable` stream][node-readable]
Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
#### body.bodyUsed
<small>*(spec-compliant)*</small>
* `Boolean`
A boolean property indicating whether this body has been consumed. Per the spec, a consumed body cannot be used again.
#### body.arrayBuffer()
#### body.blob()
#### body.json()
#### body.text()
<small>*(spec-compliant)*</small>
* Returns: <code>Promise</code>
Consume the body and return a promise that will resolve to one of these formats.
#### body.buffer()
<small>*(node-fetch extension)*</small>
* Returns: <code>Promise&lt;Buffer&gt;</code>
Consume the body and return a promise that will resolve to a Buffer.
#### body.textConverted()
<small>*(node-fetch extension)*</small>
* Returns: <code>Promise&lt;String&gt;</code>
Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible.
(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
<a id="class-fetcherror"></a>
### Class: FetchError
<small>*(node-fetch extension)*</small>
An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
<a id="class-aborterror"></a>
### Class: AbortError
<small>*(node-fetch extension)*</small>
An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.md][] for more info.
## Acknowledgement
Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
## License
MIT
[npm-image]: https://flat.badgen.net/npm/v/node-fetch
[npm-url]: https://www.npmjs.com/package/node-fetch
[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
[travis-url]: https://travis-ci.org/bitinn/node-fetch
[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master
[codecov-url]: https://codecov.io/gh/bitinn/node-fetch
[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square
[discord-url]: https://discord.gg/Zxbndcm
[opencollective-image]: https://opencollective.com/node-fetch/backers.svg
[opencollective-url]: https://opencollective.com/node-fetch
[whatwg-fetch]: https://fetch.spec.whatwg.org/
[response-init]: https://fetch.spec.whatwg.org/#responseinit
[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md

22
node_modules/@supabase/node-fetch/browser.js generated vendored Normal file
View File

@ -0,0 +1,22 @@
"use strict";
// ref: https://github.com/tc39/proposal-global
var getGlobal = function() {
// the only reliable means to get the global object is
// `Function('return this')()`
// However, this causes CSP violations in Chrome apps.
if (typeof self !== 'undefined') { return self; }
if (typeof window !== 'undefined') { return window; }
if (typeof global !== 'undefined') { return global; }
throw new Error('unable to locate global object');
}
var globalObject = getGlobal();
export const fetch = globalObject.fetch;
export default globalObject.fetch.bind(globalObject);
export const Headers = globalObject.Headers;
export const Request = globalObject.Request;
export const Response = globalObject.Response;

1778
node_modules/@supabase/node-fetch/lib/index.es.js generated vendored Normal file

File diff suppressed because it is too large

1787
node_modules/@supabase/node-fetch/lib/index.js generated vendored Normal file

File diff suppressed because it is too large

1776
node_modules/@supabase/node-fetch/lib/index.mjs generated vendored Normal file

File diff suppressed because it is too large

80
node_modules/@supabase/node-fetch/package.json generated vendored Normal file
View File

@ -0,0 +1,80 @@
{
"name": "@supabase/node-fetch",
"publishConfig": {
"access": "public"
},
"version": "2.6.15",
"description": "A light-weight module that brings window.fetch to node.js",
"main": "lib/index.js",
"browser": "./browser.js",
"files": [
"lib/index.js",
"lib/index.mjs",
"lib/index.es.js",
"browser.js"
],
"engines": {
"node": "4.x || >=6.0.0"
},
"scripts": {
"build": "cross-env BABEL_ENV=rollup rollup -c",
"prepare": "npm run build",
"test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js",
"report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
"coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json"
},
"repository": "supabase/node-fetch",
"keywords": [
"fetch",
"http",
"promise"
],
"author": "David Frank",
"license": "MIT",
"bugs": {
"url": "https://github.com/supabase/node-fetch/issues"
},
"homepage": "https://github.com/supabase/node-fetch",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"devDependencies": {
"@ungap/url-search-params": "^0.1.2",
"abort-controller": "^1.1.0",
"abortcontroller-polyfill": "^1.3.0",
"babel-core": "^6.26.3",
"babel-plugin-istanbul": "^4.1.6",
"babel-plugin-transform-async-generator-functions": "^6.24.1",
"babel-polyfill": "^6.26.0",
"babel-preset-env": "1.4.0",
"babel-register": "^6.16.3",
"chai": "^3.5.0",
"chai-as-promised": "^7.1.1",
"chai-iterator": "^1.1.1",
"chai-string": "~1.3.0",
"codecov": "3.3.0",
"cross-env": "^5.2.0",
"form-data": "^2.3.3",
"is-builtin-module": "^1.0.0",
"mocha": "^5.0.0",
"nyc": "11.9.0",
"parted": "^0.1.1",
"promise": "^8.0.3",
"resumer": "0.0.0",
"rollup": "^0.63.4",
"rollup-plugin-babel": "^3.0.7",
"string-to-arraybuffer": "^1.0.2",
"teeny-request": "3.7.0"
},
"release": {
"branches": [
"+([0-9]).x",
"main",
"next",
{
"name": "beta",
"prerelease": true
}
]
}
}

21
node_modules/@supabase/postgrest-js/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

54
node_modules/@supabase/postgrest-js/README.md generated vendored Normal file
View File

@ -0,0 +1,54 @@
# `postgrest-js`
[![Build](https://github.com/supabase/postgrest-js/workflows/CI/badge.svg)](https://github.com/supabase/postgrest-js/actions?query=branch%3Amaster)
[![Package](https://img.shields.io/npm/v/@supabase/postgrest-js)](https://www.npmjs.com/package/@supabase/postgrest-js)
[![License: MIT](https://img.shields.io/npm/l/@supabase/postgrest-js)](#license)
Isomorphic JavaScript client for [PostgREST](https://postgrest.org). The goal of this library is to provide an "ORM-like" RESTful interface.
Full documentation can be found [here](https://supabase.github.io/postgrest-js/v2).
### Quick start
Install
```bash
npm install @supabase/postgrest-js
```
Usage
```js
import { PostgrestClient } from '@supabase/postgrest-js'
const REST_URL = 'http://localhost:3000'
const postgrest = new PostgrestClient(REST_URL)
```
- select(): https://supabase.com/docs/reference/javascript/select
- insert(): https://supabase.com/docs/reference/javascript/insert
- update(): https://supabase.com/docs/reference/javascript/update
- delete(): https://supabase.com/docs/reference/javascript/delete
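A short sketch of chaining the operations above (assumes a hypothetical `countries` table exposed through PostgREST):
```ts
import { PostgrestClient } from '@supabase/postgrest-js'

const postgrest = new PostgrestClient('http://localhost:3000')

// insert a row, then read it back with server-side filtering
await postgrest.from('countries').insert([{ name: 'Mordor' }])
const { data, error } = await postgrest
  .from('countries')
  .select('id, name')
  .eq('name', 'Mordor')
  .limit(1)
if (error) console.error(error)
else console.log(data)
```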
#### Custom `fetch` implementation
`postgrest-js` uses the [`@supabase/node-fetch`](https://www.npmjs.com/package/@supabase/node-fetch) library to make HTTP requests (see the package's `dependencies`), but an alternative `fetch` implementation can be provided as an option. This is most useful in environments where that library is not compatible, for instance Cloudflare Workers:
```js
import { PostgrestClient } from '@supabase/postgrest-js'
const REST_URL = 'http://localhost:3000'
const postgrest = new PostgrestClient(REST_URL, {
fetch: (...args) => fetch(...args),
})
```
## License
This repo is licensed under MIT License.
## Sponsors
We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone.
[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase)

67
node_modules/@supabase/postgrest-js/package.json generated vendored Normal file
View File

@ -0,0 +1,67 @@
{
"name": "@supabase/postgrest-js",
"version": "1.19.2",
"description": "Isomorphic PostgREST client",
"keywords": [
"postgrest",
"supabase"
],
"homepage": "https://github.com/supabase/postgrest-js",
"bugs": "https://github.com/supabase/postgrest-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/cjs/index.js",
"module": "dist/esm/wrapper.mjs",
"exports": {
"import": {
"types": "./dist/cjs/index.d.ts",
"default": "./dist/esm/wrapper.mjs"
},
"require": {
"types": "./dist/cjs/index.d.ts",
"default": "./dist/cjs/index.js"
}
},
"types": "./dist/cjs/index.d.ts",
"repository": "supabase/postgrest-js",
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\" wrapper.mjs",
"format:check": "prettier --check \"{src,test}/**/*.ts\"",
"build": "run-s clean format build:*",
"build:cjs": "tsc -p tsconfig.json",
"build:esm": "cpy wrapper.mjs dist/esm/",
"docs": "typedoc src/index.ts --out docs/v2",
"docs:json": "typedoc --json docs/v2/spec.json --excludeExternals src/index.ts",
"test": "run-s format:check test:types db:clean db:run test:run db:clean && node test/smoke.cjs && node test/smoke.mjs",
"test:run": "jest --runInBand --coverage",
"test:update": "run-s db:clean db:run && jest --runInBand --updateSnapshot && run-s db:clean",
"test:types": "run-s build && tsd --files 'test/**/*.test-d.ts'",
"db:clean": "cd test/db && docker compose down --volumes",
"db:run": "cd test/db && docker compose up --detach && wait-for-localhost 3000"
},
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
},
"devDependencies": {
"@types/jest": "^27.5.1",
"cpy-cli": "^5.0.0",
"jest": "^28.1.0",
"node-abort-controller": "^3.0.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.6.2",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"ts-expect": "^1.3.0",
"ts-jest": "^28.0.3",
"tsd": "^0.31.2",
"type-fest": "^4.32.0",
"typedoc": "^0.22.16",
"typescript": "^4.5.5",
"wait-for-localhost-cli": "^3.0.0"
}
}

View File

@ -0,0 +1,279 @@
// @ts-ignore
import nodeFetch from '@supabase/node-fetch'
import type {
Fetch,
PostgrestSingleResponse,
PostgrestResponseSuccess,
CheckMatchingArrayTypes,
MergePartialResult,
IsValidResultOverride,
} from './types'
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
export default abstract class PostgrestBuilder<Result, ThrowOnError extends boolean = false>
implements
PromiseLike<
ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>
>
{
protected method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE'
protected url: URL
protected headers: Record<string, string>
protected schema?: string
protected body?: unknown
protected shouldThrowOnError = false
protected signal?: AbortSignal
protected fetch: Fetch
protected isMaybeSingle: boolean
constructor(builder: PostgrestBuilder<Result>) {
this.method = builder.method
this.url = builder.url
this.headers = builder.headers
this.schema = builder.schema
this.body = builder.body
this.shouldThrowOnError = builder.shouldThrowOnError
this.signal = builder.signal
this.isMaybeSingle = builder.isMaybeSingle
if (builder.fetch) {
this.fetch = builder.fetch
} else if (typeof fetch === 'undefined') {
this.fetch = nodeFetch
} else {
this.fetch = fetch
}
}
/**
* If there's an error with the query, throwOnError will reject the promise by
* throwing the error instead of returning it as part of a successful response.
*
* {@link https://github.com/supabase/supabase-js/issues/92}
*/
throwOnError(): this & PostgrestBuilder<Result, true> {
this.shouldThrowOnError = true
return this as this & PostgrestBuilder<Result, true>
}
/**
* Set an HTTP header for the request.
*/
setHeader(name: string, value: string): this {
this.headers = { ...this.headers }
this.headers[name] = value
return this
}
then<
TResult1 = ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>,
TResult2 = never
>(
onfulfilled?:
| ((
value: ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>
) => TResult1 | PromiseLike<TResult1>)
| undefined
| null,
onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null
): PromiseLike<TResult1 | TResult2> {
// https://postgrest.org/en/stable/api.html#switching-schemas
if (this.schema === undefined) {
// skip
} else if (['GET', 'HEAD'].includes(this.method)) {
this.headers['Accept-Profile'] = this.schema
} else {
this.headers['Content-Profile'] = this.schema
}
if (this.method !== 'GET' && this.method !== 'HEAD') {
this.headers['Content-Type'] = 'application/json'
}
// NOTE: Invoke w/o `this` to avoid illegal invocation error.
// https://github.com/supabase/postgrest-js/pull/247
const _fetch = this.fetch
let res = _fetch(this.url.toString(), {
method: this.method,
headers: this.headers,
body: JSON.stringify(this.body),
signal: this.signal,
}).then(async (res) => {
let error = null
let data = null
let count: number | null = null
let status = res.status
let statusText = res.statusText
if (res.ok) {
if (this.method !== 'HEAD') {
const body = await res.text()
if (body === '') {
// Prefer: return=minimal
} else if (this.headers['Accept'] === 'text/csv') {
data = body
} else if (
this.headers['Accept'] &&
this.headers['Accept'].includes('application/vnd.pgrst.plan+text')
) {
data = body
} else {
data = JSON.parse(body)
}
}
const countHeader = this.headers['Prefer']?.match(/count=(exact|planned|estimated)/)
const contentRange = res.headers.get('content-range')?.split('/')
if (countHeader && contentRange && contentRange.length > 1) {
count = parseInt(contentRange[1])
}
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.isMaybeSingle && this.method === 'GET' && Array.isArray(data)) {
if (data.length > 1) {
error = {
// https://github.com/PostgREST/postgrest/blob/a867d79c42419af16c18c3fb019eba8df992626f/src/PostgREST/Error.hs#L553
code: 'PGRST116',
details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
hint: null,
message: 'JSON object requested, multiple (or no) rows returned',
}
data = null
count = null
status = 406
statusText = 'Not Acceptable'
} else if (data.length === 1) {
data = data[0]
} else {
data = null
}
}
} else {
const body = await res.text()
try {
error = JSON.parse(body)
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (Array.isArray(error) && res.status === 404) {
data = []
error = null
status = 200
statusText = 'OK'
}
} catch {
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (res.status === 404 && body === '') {
status = 204
statusText = 'No Content'
} else {
error = {
message: body,
}
}
}
if (error && this.isMaybeSingle && error?.details?.includes('0 rows')) {
error = null
status = 200
statusText = 'OK'
}
if (error && this.shouldThrowOnError) {
throw new PostgrestError(error)
}
}
const postgrestResponse = {
error,
data,
count,
status,
statusText,
}
return postgrestResponse
})
if (!this.shouldThrowOnError) {
res = res.catch((fetchError) => ({
error: {
message: `${fetchError?.name ?? 'FetchError'}: ${fetchError?.message}`,
details: `${fetchError?.stack ?? ''}`,
hint: '',
code: `${fetchError?.code ?? ''}`,
},
data: null,
count: null,
status: 0,
statusText: '',
}))
}
return res.then(onfulfilled, onrejected)
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestBuilder<CheckMatchingArrayTypes<Result, NewResult>, ThrowOnError> {
/* istanbul ignore next */
return this as unknown as PostgrestBuilder<
CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
/**
* Override the type of the returned `data` field in the response.
*
* @typeParam NewResult - The new type to cast the response data to
* @typeParam Options - Optional type configuration (defaults to { merge: true })
* @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
* @example
* ```typescript
* // Merge with existing types (default behavior)
* const query = supabase
* .from('users')
* .select()
* .overrideTypes<{ custom_field: string }>()
*
* // Replace existing types completely
* const replaceQuery = supabase
* .from('users')
* .select()
* .overrideTypes<{ id: number; name: string }, { merge: false }>()
* ```
* @returns A PostgrestBuilder instance with the new type
*/
overrideTypes<
NewResult,
Options extends { merge?: boolean } = { merge: true }
>(): PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
    ? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
> {
return this as unknown as PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
      ? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
}
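Since the builder implements `PromiseLike`, awaiting it runs the `then()` above and resolves to the `{ data, error, count, status, statusText }` shape; a sketch (the endpoint and table name are illustrative):
```ts
import { PostgrestClient } from '@supabase/postgrest-js'

const postgrest = new PostgrestClient('http://localhost:3000')

// `count: 'exact'` sets the Prefer header; the count is parsed from content-range
const { data, error, count, status } = await postgrest
  .from('countries')
  .select('*', { count: 'exact' })
if (error) console.error(status, error)
else console.log(count, data)
```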

View File

@ -0,0 +1,181 @@
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import { DEFAULT_HEADERS } from './constants'
import { Fetch, GenericSchema } from './types'
/**
* PostgREST client.
*
* @typeParam Database - Types for the schema from the [type
* generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
*
* @typeParam SchemaName - Postgres schema to switch to. Must be a string
* literal, the same one passed to the constructor. If the schema is not
* `"public"`, this must be supplied manually.
*/
export default class PostgrestClient<
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database,
Schema extends GenericSchema = Database[SchemaName] extends GenericSchema
? Database[SchemaName]
: any
> {
url: string
headers: Record<string, string>
schemaName?: SchemaName
fetch?: Fetch
// TODO: Add back shouldThrowOnError once we figure out the typings
/**
* Creates a PostgREST client.
*
* @param url - URL of the PostgREST endpoint
* @param options - Named parameters
* @param options.headers - Custom headers
* @param options.schema - Postgres schema to switch to
* @param options.fetch - Custom fetch
*/
constructor(
url: string,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: SchemaName
fetch?: Fetch
} = {}
) {
this.url = url
this.headers = { ...DEFAULT_HEADERS, ...headers }
this.schemaName = schema
this.fetch = fetch
}
from<
TableName extends string & keyof Schema['Tables'],
Table extends Schema['Tables'][TableName]
>(relation: TableName): PostgrestQueryBuilder<Schema, Table, TableName>
from<ViewName extends string & keyof Schema['Views'], View extends Schema['Views'][ViewName]>(
relation: ViewName
): PostgrestQueryBuilder<Schema, View, ViewName>
/**
* Perform a query on a table or a view.
*
* @param relation - The table or view name to query
*/
from(relation: string): PostgrestQueryBuilder<Schema, any, any> {
const url = new URL(`${this.url}/${relation}`)
return new PostgrestQueryBuilder(url, {
headers: { ...this.headers },
schema: this.schemaName,
fetch: this.fetch,
})
}
/**
* Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
*/
schema<DynamicSchema extends string & keyof Database>(
schema: DynamicSchema
): PostgrestClient<
Database,
DynamicSchema,
Database[DynamicSchema] extends GenericSchema ? Database[DynamicSchema] : any
> {
return new PostgrestClient(this.url, {
headers: this.headers,
schema,
fetch: this.fetch,
})
}
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
rpc<FnName extends string & keyof Schema['Functions'], Fn extends Schema['Functions'][FnName]>(
fn: FnName,
args: Fn['Args'] = {},
{
head = false,
get = false,
count,
}: {
head?: boolean
get?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<
Schema,
Fn['Returns'] extends any[]
? Fn['Returns'][number] extends Record<string, unknown>
? Fn['Returns'][number]
: never
: never,
Fn['Returns'],
FnName,
null
> {
let method: 'HEAD' | 'GET' | 'POST'
const url = new URL(`${this.url}/rpc/${fn}`)
let body: unknown | undefined
if (head || get) {
method = head ? 'HEAD' : 'GET'
Object.entries(args)
        // params with an undefined value need to be filtered out, otherwise they'll
        // show up as `?param=undefined`
.filter(([_, value]) => value !== undefined)
// array values need special syntax
.map(([name, value]) => [name, Array.isArray(value) ? `{${value.join(',')}}` : `${value}`])
.forEach(([name, value]) => {
url.searchParams.append(name, value)
})
} else {
method = 'POST'
body = args
}
const headers = { ...this.headers }
if (count) {
headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url,
headers,
schema: this.schemaName,
body,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<Fn['Returns']>)
}
}
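A sketch of the `schema()` and `rpc()` paths above (schema, table, and function names are illustrative); with `head`/`get` the arguments travel in the query string, and `count` is sent via the `Prefer` header:
```ts
import { PostgrestClient } from '@supabase/postgrest-js'

const postgrest = new PostgrestClient('http://localhost:3000')

// GET/HEAD requests carry the schema in Accept-Profile, others in Content-Profile
const { data } = await postgrest.schema('tenant_a').from('orders').select('*')

// count rows returned by a set-returning function without fetching them
const { count } = await postgrest.rpc(
  'list_orders',
  { status: 'open' },
  { head: true, count: 'exact' }
)
```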

View File

@ -0,0 +1,18 @@
/**
* Error format
*
* {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
*/
export default class PostgrestError extends Error {
details: string
hint: string
code: string
constructor(context: { message: string; details: string; hint: string; code: string }) {
super(context.message)
this.name = 'PostgrestError'
this.details = context.details
this.hint = context.hint
this.code = context.code
}
}
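Combined with `throwOnError()`, failures surface as a thrown `PostgrestError` carrying these structured fields; a sketch (assumes `PostgrestError` is re-exported from the package index, with an illustrative table name):
```ts
import { PostgrestClient, PostgrestError } from '@supabase/postgrest-js'

const postgrest = new PostgrestClient('http://localhost:3000')

try {
  await postgrest.from('countries').select('*').throwOnError()
} catch (err) {
  if (err instanceof PostgrestError) {
    // e.g. code 'PGRST116' with details/hint from PostgREST
    console.error(err.code, err.message, err.details, err.hint)
  }
}
```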

View File

@ -0,0 +1,592 @@
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import { JsonPathToAccessor, JsonPathToType } from './select-query-parser/utils'
import { GenericSchema } from './types'
type FilterOperator =
| 'eq'
| 'neq'
| 'gt'
| 'gte'
| 'lt'
| 'lte'
| 'like'
| 'ilike'
| 'is'
| 'in'
| 'cs'
| 'cd'
| 'sl'
| 'sr'
| 'nxl'
| 'nxr'
| 'adj'
| 'ov'
| 'fts'
| 'plfts'
| 'phfts'
| 'wfts'
export type IsStringOperator<Path extends string> = Path extends `${string}->>${string}`
? true
: false
// Match relationship filters with `table.column` syntax and resolve underlying
// column value. If not matched, fallback to generic type.
// TODO: Validate the relationship itself ala select-query-parser. Currently we
// assume that all tables have valid relationships to each other, despite
// nonexistent foreign keys.
type ResolveFilterValue<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
ColumnName extends string
> = ColumnName extends `${infer RelationshipTable}.${infer Remainder}`
? Remainder extends `${infer _}.${infer _}`
? ResolveFilterValue<Schema, Row, Remainder>
: ResolveFilterRelationshipValue<Schema, RelationshipTable, Remainder>
: ColumnName extends keyof Row
? Row[ColumnName]
: // If the column selection is a jsonpath like `data->value` or `data->>value` we attempt to match
// the expected type with the parsed custom json type
IsStringOperator<ColumnName> extends true
? string
: JsonPathToType<Row, JsonPathToAccessor<ColumnName>> extends infer JsonPathValue
? JsonPathValue extends never
? never
: JsonPathValue
: never
type ResolveFilterRelationshipValue<
Schema extends GenericSchema,
RelationshipTable extends string,
RelationshipColumn extends string
> = Schema['Tables'] & Schema['Views'] extends infer TablesAndViews
? RelationshipTable extends keyof TablesAndViews
? 'Row' extends keyof TablesAndViews[RelationshipTable]
? RelationshipColumn extends keyof TablesAndViews[RelationshipTable]['Row']
? TablesAndViews[RelationshipTable]['Row'][RelationshipColumn]
: unknown
: unknown
: unknown
: never
export default class PostgrestFilterBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestTransformBuilder<Schema, Row, Result, RelationName, Relationships> {
/**
* Match only rows where `column` is equal to `value`.
*
* To check if the value of `column` is NULL, you should use `.is()` instead.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
eq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? NonNullable<unknown>
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? NonNullable<ResolvedFilterValue>
: // We should never enter this case as all the branches are covered above
never
): this {
this.url.searchParams.append(column, `eq.${value}`)
return this
}
/**
* Match only rows where `column` is not equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
neq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: never
): this {
this.url.searchParams.append(column, `neq.${value}`)
return this
}
gt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gt(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gt(column: string, value: unknown): this {
this.url.searchParams.append(column, `gt.${value}`)
return this
}
gte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gte(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gte(column: string, value: unknown): this {
this.url.searchParams.append(column, `gte.${value}`)
return this
}
lt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lt(column: string, value: unknown): this
/**
* Match only rows where `column` is less than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lt(column: string, value: unknown): this {
this.url.searchParams.append(column, `lt.${value}`)
return this
}
lte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lte(column: string, value: unknown): this
/**
* Match only rows where `column` is less than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lte(column: string, value: unknown): this {
this.url.searchParams.append(column, `lte.${value}`)
return this
}
like<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
like(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-sensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
like(column: string, pattern: string): this {
this.url.searchParams.append(column, `like.${pattern}`)
return this
}
likeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(all).{${patterns.join(',')}}`)
return this
}
likeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(any).{${patterns.join(',')}}`)
return this
}
ilike<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
ilike(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-insensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
ilike(column: string, pattern: string): this {
this.url.searchParams.append(column, `ilike.${pattern}`)
return this
}
ilikeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(all).{${patterns.join(',')}}`)
return this
}
ilikeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(any).{${patterns.join(',')}}`)
return this
}
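// Usage sketch (illustrative, hypothetical table/columns): `%` is the SQL
// wildcard, and the `*AllOf`/`*AnyOf` variants encode their patterns as a
// `{a,b}` list:
//
//   postgrest.from('tickets').select('*').ilike('holder', '%garcia%')
//   // => ...?holder=ilike.%garcia%
//   postgrest.from('tickets').select('*').likeAnyOf('section', ['A%', 'B%'])
//   // => ...?section=like(any).{A%,B%}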
is<ColumnName extends string & keyof Row>(
column: ColumnName,
value: Row[ColumnName] & (boolean | null)
): this
is(column: string, value: boolean | null): this
/**
* Match only rows where `column` IS `value`.
*
* For non-boolean columns, this is only relevant for checking if the value of
* `column` is NULL by setting `value` to `null`.
*
* For boolean columns, you can also set `value` to `true` or `false` and it
* will behave the same way as `.eq()`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
is(column: string, value: boolean | null): this {
this.url.searchParams.append(column, `is.${value}`)
return this
}
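// Usage sketch (illustrative): `.is()` maps to SQL `IS`, the correct way to
// test for NULL:
//
//   postgrest.from('tickets').select('*').is('scanned_at', null)
//   // => ...?scanned_at=is.null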
/**
* Match only rows where `column` is included in the `values` array.
*
* @param column - The column to filter on
* @param values - The values array to filter with
*/
in<ColumnName extends string>(
column: ColumnName,
values: ReadonlyArray<
ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: // We should never enter this case as all the branches are covered above
never
>
): this {
const cleanedValues = Array.from(new Set(values))
.map((s) => {
// handle postgrest reserved characters
// https://postgrest.org/en/v7.0.0/api.html#reserved-characters
if (typeof s === 'string' && new RegExp('[,()]').test(s)) return `"${s}"`
else return `${s}`
})
.join(',')
this.url.searchParams.append(column, `in.(${cleanedValues})`)
return this
}
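// Usage sketch (illustrative): values containing PostgREST reserved
// characters (`,`, `(`, `)`) get double-quoted by the branch above:
//
//   postgrest.from('tickets').select('*').in('status', ['paid', 'hold,vip'])
//   // => ...?status=in.(paid,"hold,vip")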
contains<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* `column` contains every element appearing in `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range types can be inclusive '[', ']' or exclusive '(', ')' so just
// keep it simple and accept a string
this.url.searchParams.append(column, `cs.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cs.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cs.${JSON.stringify(value)}`)
}
return this
}
containedBy<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* every element appearing in `column` is contained by `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `cd.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cd.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cd.${JSON.stringify(value)}`)
}
return this
}
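// Usage sketch (illustrative): for both `.contains()` and `.containedBy()`,
// the operand's JS type picks the encoding: string => range literal,
// array => `{...}` list, object => JSON:
//
//   postgrest.from('events').select('*').contains('tags', ['vip', 'backstage'])
//   // => ...?tags=cs.{vip,backstage}
//   postgrest.from('events').select('*').containedBy('during', '[2025-01-01,2025-01-02)')
//   // => ...?during=cd.[2025-01-01,2025-01-02)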
rangeGt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is greater than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGt(column: string, range: string): this {
this.url.searchParams.append(column, `sr.${range}`)
return this
}
rangeGte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or greater than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGte(column: string, range: string): this {
this.url.searchParams.append(column, `nxl.${range}`)
return this
}
rangeLt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is less than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLt(column: string, range: string): this {
this.url.searchParams.append(column, `sl.${range}`)
return this
}
rangeLte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or less than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLte(column: string, range: string): this {
this.url.searchParams.append(column, `nxr.${range}`)
return this
}
rangeAdjacent<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeAdjacent(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where `column` is
* mutually exclusive to `range` and there can be no element between the two
* ranges.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeAdjacent(column: string, range: string): this {
this.url.searchParams.append(column, `adj.${range}`)
return this
}
overlaps<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]>
): this
overlaps(column: string, value: string | readonly unknown[]): this
/**
* Only relevant for array and range columns. Match only rows where
* `column` and `value` have an element in common.
*
* @param column - The array or range column to filter on
* @param value - The array or range value to filter with
*/
overlaps(column: string, value: string | readonly unknown[]): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `ov.${value}`)
} else {
// array
this.url.searchParams.append(column, `ov.{${value.join(',')}}`)
}
return this
}
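// Usage sketch (illustrative): the range filters map onto PostgREST's range
// operators (`sr`, `nxl`, `sl`, `nxr`, `adj`, `ov`):
//
//   postgrest.from('events').select('*').rangeGt('during', '[2025-01-02,2025-01-03)')
//   // => ...?during=sr.[2025-01-02,2025-01-03)
//   postgrest.from('events').select('*').overlaps('during', '[2025-01-01,2025-01-02)')
//   // => ...?during=ov.[2025-01-01,2025-01-02)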
textSearch<ColumnName extends string & keyof Row>(
column: ColumnName,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
textSearch(
column: string,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
/**
* Only relevant for text and tsvector columns. Match only rows where
* `column` matches the query string in `query`.
*
* @param column - The text or tsvector column to filter on
* @param query - The query text to match with
* @param options - Named parameters
* @param options.config - The text search configuration to use
* @param options.type - Change how the `query` text is interpreted
*/
textSearch(
column: string,
query: string,
{ config, type }: { config?: string; type?: 'plain' | 'phrase' | 'websearch' } = {}
): this {
let typePart = ''
if (type === 'plain') {
typePart = 'pl'
} else if (type === 'phrase') {
typePart = 'ph'
} else if (type === 'websearch') {
typePart = 'w'
}
const configPart = config === undefined ? '' : `(${config})`
this.url.searchParams.append(column, `${typePart}fts${configPart}.${query}`)
return this
}
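// Usage sketch (illustrative): `type` picks the tsquery parser ('plain' =>
// `pl`, 'phrase' => `ph`, 'websearch' => `w`) and `config` the text search
// configuration:
//
//   postgrest.from('tickets').select('*')
//     .textSearch('notes', `'front' & 'row'`, { config: 'english', type: 'plain' })
//   // => ...?notes=plfts(english).'front' & 'row'  (URL-encoded)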
match<ColumnName extends string & keyof Row>(query: Record<ColumnName, Row[ColumnName]>): this
match(query: Record<string, unknown>): this
/**
* Match only rows where each column named in `query`'s keys is equal to its
* associated value. Shorthand for multiple `.eq()`s.
*
* @param query - The object to filter with, with column names as keys mapped
* to their filter values
*/
match(query: Record<string, unknown>): this {
Object.entries(query).forEach(([column, value]) => {
this.url.searchParams.append(column, `eq.${value}`)
})
return this
}
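// Usage sketch (illustrative): `.match()` expands to one `eq` parameter per
// key:
//
//   postgrest.from('tickets').select('*').match({ section: 'A', row: 12 })
//   // => ...?section=eq.A&row=eq.12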
not<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: FilterOperator,
value: Row[ColumnName]
): this
not(column: string, operator: string, value: unknown): this
/**
* Match only rows which don't satisfy the filter.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to be negated to filter with, following
* PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
not(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `not.${operator}.${value}`)
return this
}
/**
* Match only rows which satisfy at least one of the filters.
*
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure it's properly sanitized.
*
* It's currently not possible to do an `.or()` filter across multiple tables.
*
* @param filters - The filters to use, following PostgREST syntax
* @param options - Named parameters
* @param options.referencedTable - Set this to filter on referenced tables
* instead of the parent table
* @param options.foreignTable - Deprecated, use `referencedTable` instead
*/
or(
filters: string,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = referencedTable ? `${referencedTable}.or` : 'or'
this.url.searchParams.append(key, `(${filters})`)
return this
}
filter<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: `${'' | 'not.'}${FilterOperator}`,
value: unknown
): this
filter(column: string, operator: string, value: unknown): this
/**
* Match only rows which satisfy the filter. This is an escape hatch - you
* should use the specific filter methods wherever possible.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to filter with, following PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
filter(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `${operator}.${value}`)
return this
}
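// Usage sketch (illustrative): `.not()`, `.or()` and `.filter()` pass raw
// PostgREST operator syntax through, so the caller must sanitize inputs:
//
//   postgrest.from('tickets').select('*')
//     .not('status', 'eq', 'cancelled')
//     .or('price.lt.100,section.eq.A')
//   // => ...?status=not.eq.cancelled&or=(price.lt.100,section.eq.A)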
}

View File

@ -0,0 +1,381 @@
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import { GetResult } from './select-query-parser/result'
import { Fetch, GenericSchema, GenericTable, GenericView } from './types'
export default class PostgrestQueryBuilder<
Schema extends GenericSchema,
Relation extends GenericTable | GenericView,
RelationName = unknown,
Relationships = Relation extends { Relationships: infer R } ? R : unknown
> {
url: URL
headers: Record<string, string>
schema?: string
signal?: AbortSignal
fetch?: Fetch
constructor(
url: URL,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: string
fetch?: Fetch
}
) {
this.url = url
this.headers = headers
this.schema = schema
this.fetch = fetch
}
/**
* Perform a SELECT query on the table or view.
*
* @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName`
*
* @param options - Named parameters
*
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
*
* @param options.count - Count algorithm to use to count rows in the table or view.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
select<
Query extends string = '*',
ResultOne = GetResult<Schema, Relation['Row'], RelationName, Relationships, Query>
>(
columns?: Query,
{
head = false,
count,
}: {
head?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], ResultOne[], RelationName, Relationships> {
const method = head ? 'HEAD' : 'GET'
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
if (count) {
this.headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<ResultOne[]>)
}
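// Usage sketch (illustrative): `head: true` issues a HEAD request, which
// combined with `count` fetches only the number of rows:
//
//   const { count } = await postgrest
//     .from('tickets')
//     .select('*', { head: true, count: 'exact' })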
// TODO(v3): Make `defaultToNull` consistent for both single & bulk inserts.
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an INSERT into the table or view.
*
* By default, inserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to insert. Pass an object to insert a single row
* or an array to insert multiple rows.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count inserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. Only applies for bulk
* inserts.
*/
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
count,
defaultToNull = true,
}: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
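// Usage sketch (illustrative): a bulk insert sends the union of all keys via
// the `columns` query parameter, and `defaultToNull: false` adds
// `missing=default` so omitted fields use their column defaults:
//
//   await postgrest.from('tickets').insert(
//     [{ seat: 'A1' }, { seat: 'A2', price: 150 }],
//     { defaultToNull: false }
//   )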
// TODO(v3): Make `defaultToNull` consistent for both single & bulk upserts.
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an UPSERT on the table or view. Depending on the column(s) passed
* to `onConflict`, `.upsert()` allows you to perform the equivalent of
* `.insert()` if a row with the corresponding `onConflict` columns doesn't
* exist, or if it does exist, perform an alternative action depending on
* `ignoreDuplicates`.
*
* By default, upserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to upsert with. Pass an object to upsert a
* single row or an array to upsert multiple rows.
*
* @param options - Named parameters
*
* @param options.onConflict - Comma-separated UNIQUE column(s) to specify how
* duplicate rows are determined. Two rows are duplicates if all the
* `onConflict` columns are equal.
*
* @param options.ignoreDuplicates - If `true`, duplicate rows are ignored. If
* `false`, duplicate rows are merged with existing rows.
*
* @param options.count - Count algorithm to use to count upserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. This only applies when
* inserting new rows, not when merging with existing rows under
* `ignoreDuplicates: false`. This also only applies when doing bulk upserts.
*/
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
onConflict,
ignoreDuplicates = false,
count,
defaultToNull = true,
}: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = [`resolution=${ignoreDuplicates ? 'ignore' : 'merge'}-duplicates`]
if (onConflict !== undefined) this.url.searchParams.set('on_conflict', onConflict)
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
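// Usage sketch (illustrative): `onConflict` names the UNIQUE column(s) used
// to detect duplicates, and `ignoreDuplicates` selects between
// `resolution=ignore-duplicates` and `resolution=merge-duplicates`:
//
//   await postgrest.from('tickets')
//     .upsert({ seat: 'A1', price: 175 }, { onConflict: 'seat' })
//     .select()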
/**
* Perform an UPDATE on the table or view.
*
* By default, updated rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param values - The values to update with
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count updated rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
update<Row extends Relation extends { Update: unknown } ? Relation['Update'] : never>(
values: Row,
{
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'PATCH'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
/**
* Perform a DELETE on the table or view.
*
* By default, deleted rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count deleted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
delete({
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'DELETE'
const prefersHeaders = []
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (this.headers['Prefer']) {
prefersHeaders.unshift(this.headers['Prefer'])
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
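// Usage sketch (illustrative): `.update()` and `.delete()` return filter
// builders, so filters are chained afterwards and `.select()` opts into
// returning the affected rows:
//
//   await postgrest.from('tickets').update({ status: 'cancelled' }).eq('id', 42).select()
//   await postgrest.from('tickets').delete().eq('id', 42)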
}

View File

@ -0,0 +1,327 @@
import PostgrestBuilder from './PostgrestBuilder'
import { GetResult } from './select-query-parser/result'
import { GenericSchema, CheckMatchingArrayTypes } from './types'
export default class PostgrestTransformBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestBuilder<Result> {
/**
* Perform a SELECT on the query result.
*
* By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
* return modified rows. By calling this method, modified rows are returned in
* `data`.
*
* @param columns - The columns to retrieve, separated by commas
*/
select<
Query extends string = '*',
NewResultOne = GetResult<Schema, Row, RelationName, Relationships, Query>
>(
columns?: Query
): PostgrestTransformBuilder<Schema, Row, NewResultOne[], RelationName, Relationships> {
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
if (this.headers['Prefer']) {
this.headers['Prefer'] += ','
}
this.headers['Prefer'] += 'return=representation'
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
NewResultOne[],
RelationName,
Relationships
>
}
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: undefined }
): this
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: string }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: undefined }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: string }
): this
/**
* Order the query result by `column`.
*
* You can call this method multiple times to order by multiple columns.
*
* You can order referenced tables, but it only affects the ordering of the
* parent table if you use `!inner` in the query.
*
* @param column - The column to order by
* @param options - Named parameters
* @param options.ascending - If `true`, the result will be in ascending order
* @param options.nullsFirst - If `true`, `null`s appear first. If `false`,
* `null`s appear last.
* @param options.referencedTable - Set this to order a referenced table by
* its columns
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
order(
column: string,
{
ascending = true,
nullsFirst,
foreignTable,
referencedTable = foreignTable,
}: {
ascending?: boolean
nullsFirst?: boolean
foreignTable?: string
referencedTable?: string
} = {}
): this {
const key = referencedTable ? `${referencedTable}.order` : 'order'
const existingOrder = this.url.searchParams.get(key)
this.url.searchParams.set(
key,
`${existingOrder ? `${existingOrder},` : ''}${column}.${ascending ? 'asc' : 'desc'}${
nullsFirst === undefined ? '' : nullsFirst ? '.nullsfirst' : '.nullslast'
}`
)
return this
}
/**
* Limit the query result by `count`.
*
* @param count - The maximum number of rows to return
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
limit(
count: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(key, `${count}`)
return this
}
/**
* Limit the query result by starting at an offset `from` and ending at the offset `to`.
* Only records within this range are returned.
* This respects the query order; if there is no order clause, the range could behave unexpectedly.
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
* and fourth rows of the query.
*
* @param from - The starting index from which to limit the result
* @param to - The last index to which to limit the result
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
range(
from: number,
to: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const keyOffset =
typeof referencedTable === 'undefined' ? 'offset' : `${referencedTable}.offset`
const keyLimit = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(keyOffset, `${from}`)
// Range is inclusive, so add 1
this.url.searchParams.set(keyLimit, `${to - from + 1}`)
return this
}
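// Usage sketch (illustrative): `range(from, to)` is inclusive and compiles to
// `offset`/`limit`, so page `p` of size `n` is `range(p * n, p * n + n - 1)`:
//
//   postgrest.from('tickets').select('*').order('price', { ascending: false }).range(0, 9)
//   // => ...?order=price.desc&offset=0&limit=10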
/**
* Set the AbortSignal for the fetch request.
*
* @param signal - The AbortSignal to use for the fetch request
*/
abortSignal(signal: AbortSignal): this {
this.signal = signal
return this
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be one row (e.g. using `.limit(1)`), otherwise this
* returns an error.
*/
single<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne> {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
return this as unknown as PostgrestBuilder<ResultOne>
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
* this returns an error.
*/
maybeSingle<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne | null> {
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.method === 'GET') {
this.headers['Accept'] = 'application/json'
} else {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
}
this.isMaybeSingle = true
return this as unknown as PostgrestBuilder<ResultOne | null>
}
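// Usage sketch (illustrative): `.single()` errors unless exactly one row is
// returned, while `.maybeSingle()` also accepts zero rows:
//
//   const { data } = await postgrest.from('tickets').select('*').eq('id', 42).maybeSingle()
//   // data: Row | null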
/**
* Return `data` as a string in CSV format.
*/
csv(): PostgrestBuilder<string> {
this.headers['Accept'] = 'text/csv'
return this as unknown as PostgrestBuilder<string>
}
/**
* Return `data` as an object in [GeoJSON](https://geojson.org) format.
*/
geojson(): PostgrestBuilder<Record<string, unknown>> {
this.headers['Accept'] = 'application/geo+json'
return this as unknown as PostgrestBuilder<Record<string, unknown>>
}
/**
* Return `data` as the EXPLAIN plan for the query.
*
* You need to enable the
* [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
* setting before using this method.
*
* @param options - Named parameters
*
* @param options.analyze - If `true`, the query will be executed and the
* actual run time will be returned
*
* @param options.verbose - If `true`, the query identifier will be returned
* and `data` will include the output columns of the query
*
* @param options.settings - If `true`, include information on configuration
* parameters that affect query planning
*
* @param options.buffers - If `true`, include information on buffer usage
*
* @param options.wal - If `true`, include information on WAL record generation
*
* @param options.format - The format of the output, can be `"text"` (default)
* or `"json"`
*/
explain({
analyze = false,
verbose = false,
settings = false,
buffers = false,
wal = false,
format = 'text',
}: {
analyze?: boolean
verbose?: boolean
settings?: boolean
buffers?: boolean
wal?: boolean
format?: 'json' | 'text'
} = {}): PostgrestBuilder<Record<string, unknown>[]> | PostgrestBuilder<string> {
const options = [
analyze ? 'analyze' : null,
verbose ? 'verbose' : null,
settings ? 'settings' : null,
buffers ? 'buffers' : null,
wal ? 'wal' : null,
]
.filter(Boolean)
.join('|')
// An Accept header can carry multiple media types but postgrest-js always sends one
const forMediatype = this.headers['Accept'] ?? 'application/json'
this.headers[
'Accept'
] = `application/vnd.pgrst.plan+${format}; for="${forMediatype}"; options=${options};`
if (format === 'json') return this as unknown as PostgrestBuilder<Record<string, unknown>[]>
else return this as unknown as PostgrestBuilder<string>
}
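// Usage sketch (illustrative): the enabled options are joined with `|` into
// the Accept header's `options=` parameter:
//
//   await postgrest.from('tickets').select('*').explain({ analyze: true, format: 'json' })
//   // Accept: application/vnd.pgrst.plan+json; for="application/json"; options=analyze;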
/**
* Rollback the query.
*
* `data` will still be returned, but the query is not committed.
*/
rollback(): this {
if ((this.headers['Prefer'] ?? '').trim().length > 0) {
this.headers['Prefer'] += ',tx=rollback'
} else {
this.headers['Prefer'] = 'tx=rollback'
}
return this
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
> {
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
>
}
}

2
node_modules/@supabase/postgrest-js/src/constants.ts generated vendored Normal file
View File

@ -0,0 +1,2 @@
import { version } from './version'
export const DEFAULT_HEADERS = { 'X-Client-Info': `postgrest-js/${version}` }

34
node_modules/@supabase/postgrest-js/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,34 @@
// Always update wrapper.mjs when updating this file.
import PostgrestClient from './PostgrestClient'
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestError from './PostgrestError'
export {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export default {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export type {
PostgrestResponse,
PostgrestResponseFailure,
PostgrestResponseSuccess,
PostgrestSingleResponse,
PostgrestMaybeSingleResponse,
} from './types'
// https://github.com/supabase/postgrest-js/issues/551
// To be replaced with a helper type that only uses public types
export type { GetResult as UnstableGetResult } from './select-query-parser/result'

View File

@ -0,0 +1,469 @@
// Credits to @bnjmnt4n (https://www.npmjs.com/package/postgrest-query)
// See https://github.com/PostgREST/postgrest/blob/2f91853cb1de18944a4556df09e52450b881cfb3/src/PostgREST/ApiRequest/QueryParams.hs#L282-L284
import { SimplifyDeep } from '../types'
import { JsonPathToAccessor } from './utils'
/**
* Parses a query.
* A query is a sequence of nodes, separated by `,`, ensuring that there is
* no remaining input after all nodes have been parsed.
*
* Returns an array of parsed nodes, or an error.
*/
export type ParseQuery<Query extends string> = string extends Query
? GenericStringError
: ParseNodes<EatWhitespace<Query>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends ''
? SimplifyDeep<Nodes>
: ParserError<`Unexpected input: ${Remainder}`>
: ParserError<'Invalid nodes array structure'>
: ParseNodes<EatWhitespace<Query>>
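// Type-level sketch (illustrative): the parser runs entirely in the type
// system, so for example:
//
//   type Parsed = ParseQuery<'id, author:users(name)'>
//   // roughly:
//   // [{ type: 'field'; name: 'id' },
//   //  { type: 'field'; name: 'users'; alias: 'author';
//   //    children: [{ type: 'field'; name: 'name' }] }]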
/**
* Notes: all `Parse*` types assume that their input strings have their whitespace
* removed. They return tuples of ["Return Value", "Remainder of text"] or
* a `ParserError`.
*/
/**
* Parses a sequence of nodes, separated by `,`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"] or an error.
*/
type ParseNodes<Input extends string> = string extends Input
? GenericStringError
: ParseNodesHelper<Input, []>
type ParseNodesHelper<Input extends string, Nodes extends Ast.Node[]> = ParseNode<Input> extends [
infer Node,
`${infer Remainder}`
]
? Node extends Ast.Node
? EatWhitespace<Remainder> extends `,${infer Remainder}`
? ParseNodesHelper<EatWhitespace<Remainder>, [...Nodes, Node]>
: [[...Nodes, Node], EatWhitespace<Remainder>]
: ParserError<'Invalid node type in nodes helper'>
: ParseNode<Input>
/**
* Parses a node.
* A node is one of the following:
* - `*`
* - a field, as defined above
* - a renamed field, `renamed_field:field`
* - a spread field, `...field`
*/
type ParseNode<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: // `*`
Input extends `*${infer Remainder}`
? [Ast.StarNode, EatWhitespace<Remainder>]
: // `...field`
Input extends `...${infer Remainder}`
? ParseField<EatWhitespace<Remainder>> extends [infer TargetField, `${infer Remainder}`]
? TargetField extends Ast.FieldNode
? [{ type: 'spread'; target: TargetField }, EatWhitespace<Remainder>]
: ParserError<'Invalid target field type in spread'>
: ParserError<`Unable to parse spread resource at \`${Input}\``>
: ParseIdentifier<Input> extends [infer NameOrAlias, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `::${infer _}`
? // It's a type cast and not an alias, so treat it as part of the field.
ParseField<Input>
: EatWhitespace<Remainder> extends `:${infer Remainder}`
? // `alias:`
ParseField<EatWhitespace<Remainder>> extends [infer Field, `${infer Remainder}`]
? Field extends Ast.FieldNode
? [Omit<Field, 'alias'> & { alias: NameOrAlias }, EatWhitespace<Remainder>]
: ParserError<'Invalid field type in alias parsing'>
: ParserError<`Unable to parse renamed field at \`${Input}\``>
: // Otherwise, just parse it as a field without alias.
ParseField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
/**
* Parses a field without preceding alias.
* A field is one of the following:
* - a top-level `count` field: https://docs.postgrest.org/en/v12/references/api/aggregate_functions.html#the-case-of-count
* - a field with an embedded resource
* - `field(nodes)`
* - `field!hint(nodes)`
* - `field!inner(nodes)`
* - `field!left(nodes)`
* - `field!hint!inner(nodes)`
* - `field!hint!left(nodes)`
* - a field without an embedded resource (see {@link ParseNonEmbeddedResourceField})
*/
type ParseField<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: ParseIdentifier<Input> extends [infer Name, `${infer Remainder}`]
? Name extends 'count'
? ParseCountField<Input>
: Remainder extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!inner(nodes)`
[{ type: 'field'; name: Name; innerJoin: true; children: Children }, Remainder]
: ParserError<'Invalid children array in inner join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!inner" at \`${Remainder}\``
>
: EatWhitespace<Remainder> extends `!left${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!left(nodes)`
// !left is a noise word - treat it the same way as a non-`!inner`.
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in left join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!left" at \`${EatWhitespace<Remainder>}\``
>
: EatWhitespace<Remainder> extends `!${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [infer Hint, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint!inner(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; innerJoin: true; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint inner join'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParserError<`Expected identifier after "!" at \`${EatWhitespace<Remainder>}\``>
: EatWhitespace<Remainder> extends `(${infer _}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field(nodes)`
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in field'>
: // Return error if start of embedded resource was detected but not found.
ParseEmbeddedResource<EatWhitespace<Remainder>>
: // Otherwise it's a non-embedded resource field.
ParseNonEmbeddedResourceField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
type ParseCountField<Input extends string> = ParseIdentifier<Input> extends [
'count',
`${infer Remainder}`
]
? (
EatWhitespace<Remainder> extends `()${infer Remainder_}`
? EatWhitespace<Remainder_>
: EatWhitespace<Remainder>
) extends `${infer Remainder}`
? Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
{ type: 'field'; name: 'count'; aggregateFunction: 'count'; castType: CastType },
Remainder
]
: ParseFieldTypeCast<Remainder>
: [{ type: 'field'; name: 'count'; aggregateFunction: 'count' }, Remainder]
: never
: ParserError<`Expected "count" at \`${Input}\``>
/**
* Parses an embedded resource, which is an opening `(`, followed by a sequence of
* 0 or more nodes separated by `,`, then a closing `)`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"], an error,
* or the original string input indicating that no opening `(` was found.
*/
type ParseEmbeddedResource<Input extends string> = Input extends `(${infer Remainder}`
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [[], EatWhitespace<Remainder>]
: ParseNodes<EatWhitespace<Remainder>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [Nodes, EatWhitespace<Remainder>]
: ParserError<`Expected ")" at \`${EatWhitespace<Remainder>}\``>
: ParserError<'Invalid nodes array in embedded resource'>
: ParseNodes<EatWhitespace<Remainder>>
: ParserError<`Expected "(" at \`${Input}\``>
/**
* Parses a field excluding embedded resources, without preceding field renaming.
* This is one of the following:
* - `field`
* - `field.aggregate()`
* - `field.aggregate()::type`
* - `field::type`
* - `field::type.aggregate()`
* - `field::type.aggregate()::type`
* - `field->json...`
* - `field->json.aggregate()`
* - `field->json.aggregate()::type`
* - `field->json::type`
* - `field->json::type.aggregate()`
* - `field->json::type.aggregate()::type`
*/
type ParseNonEmbeddedResourceField<Input extends string> = ParseIdentifier<Input> extends [
infer Name,
`${infer Remainder}`
]
? // Parse optional JSON path.
(
Remainder extends `->${infer PathAndRest}`
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [
{
type: 'field'
name: Name
alias: PropertyName
castType: PropertyType
jsonPath: JsonPathToAccessor<
PathAndRest extends `${infer Path},${string}` ? Path : PathAndRest
>
},
Remainder
]
: ParseJsonAccessor<Remainder>
: [{ type: 'field'; name: Name }, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional typecast or aggregate function input typecast.
(
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [Omit<Field, 'castType'> & { castType: CastType }, Remainder]
: ParseFieldTypeCast<Remainder>
: [Field, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional aggregate function.
Remainder extends `.${infer _}`
? ParseFieldAggregation<Remainder> extends [
infer AggregateFunction,
`${infer Remainder}`
]
? // Parse optional aggregate function output typecast.
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
Omit<Field, 'castType'> & {
aggregateFunction: AggregateFunction
castType: CastType
},
Remainder
]
: ParseFieldTypeCast<Remainder>
: [Field & { aggregateFunction: AggregateFunction }, Remainder]
: ParseFieldAggregation<Remainder>
: [Field, Remainder]
: Parsed
: never
: Parsed
: never
: ParserError<`Expected identifier at \`${Input}\``>
/**
* Parses a JSON property accessor of the shape `->a->b->c`. The last accessor in
* the series may convert to text by using the ->> operator instead of ->.
*
* Returns a tuple of ["Last property name", "Last property type", "Remainder of text"]
*/
type ParseJsonAccessor<Input extends string> = Input extends `->${infer Remainder}`
? Remainder extends `>${infer Remainder}`
? ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? [Name, 'text', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->>`'>
: ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [PropertyName, PropertyType, EatWhitespace<Remainder>]
: [Name, 'json', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->`'>
: ParserError<'Expected ->'>
/**
* Parses a field typecast (`::type`), returning a tuple of ["Type", "Remainder of text"].
*/
type ParseFieldTypeCast<Input extends string> = EatWhitespace<Input> extends `::${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [`${infer CastType}`, `${infer Remainder}`]
? [CastType, EatWhitespace<Remainder>]
: ParserError<`Invalid type for \`::\` operator at \`${Remainder}\``>
: ParserError<'Expected ::'>
/**
* Parses a field aggregation (`.max()`), returning a tuple of ["Aggregate function", "Remainder of text"]
*/
type ParseFieldAggregation<Input extends string> =
EatWhitespace<Input> extends `.${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [
`${infer FunctionName}`,
`${infer Remainder}`
]
? // Ensure that aggregation function is valid.
FunctionName extends Token.AggregateFunction
? EatWhitespace<Remainder> extends `()${infer Remainder}`
? [FunctionName, EatWhitespace<Remainder>]
: ParserError<`Expected \`()\` after \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator at \`${Remainder}\``>
: ParserError<'Expected .'>
/**
* Parses a (possibly double-quoted) identifier.
* Identifiers are sequences of 1 or more letters.
*/
type ParseIdentifier<Input extends string> = ParseLetters<Input> extends [
infer Name,
`${infer Remainder}`
]
? [Name, EatWhitespace<Remainder>]
: ParseQuotedLetters<Input> extends [infer Name, `${infer Remainder}`]
? [Name, EatWhitespace<Remainder>]
: ParserError<`No (possibly double-quoted) identifier at \`${Input}\``>
/**
* Parse a consecutive sequence of 1 or more letters, where letters are `[0-9a-zA-Z_]`.
*/
type ParseLetters<Input extends string> = string extends Input
? GenericStringError
: ParseLettersHelper<Input, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected letter at \`${Input}\``>
: [Letters, Remainder]
: ParseLettersHelper<Input, ''>
type ParseLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends Token.Letter
? ParseLettersHelper<Remainder, `${Acc}${L}`>
: [Acc, Input]
: [Acc, '']
/**
* Parse a consecutive sequence of 1 or more double-quoted letters,
* where letters are `[^"]`.
*/
type ParseQuotedLetters<Input extends string> = string extends Input
? GenericStringError
: Input extends `"${infer Remainder}`
? ParseQuotedLettersHelper<Remainder, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected string at \`${Remainder}\``>
: [Letters, Remainder]
: ParseQuotedLettersHelper<Remainder, ''>
: ParserError<`Not a double-quoted string at \`${Input}\``>
type ParseQuotedLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends '"'
? [Acc, Remainder]
: ParseQuotedLettersHelper<Remainder, `${Acc}${L}`>
: ParserError<`Missing closing double-quote in \`"${Acc}${Input}\``>
/**
* Trims whitespace from the left of the input.
*/
type EatWhitespace<Input extends string> = string extends Input
? GenericStringError
: Input extends `${Token.Whitespace}${infer Remainder}`
? EatWhitespace<Remainder>
: Input
/**
* Creates a new {@link ParserError} if the given input is not already a parser error.
*/
type CreateParserErrorIfRequired<Input, Message extends string> = Input extends ParserError<string>
? Input
: ParserError<Message>
/**
* Parser errors.
*/
export type ParserError<Message extends string> = { error: true } & Message
type GenericStringError = ParserError<'Received a generic string'>
export namespace Ast {
export type Node = FieldNode | StarNode | SpreadNode
export type FieldNode = {
type: 'field'
name: string
alias?: string
hint?: string
innerJoin?: true
castType?: string
jsonPath?: string
aggregateFunction?: Token.AggregateFunction
children?: Node[]
}
export type StarNode = {
type: 'star'
}
export type SpreadNode = {
type: 'spread'
target: FieldNode & { children: Node[] }
}
}
namespace Token {
export type Whitespace = ' ' | '\n' | '\t'
type LowerAlphabet =
| 'a'
| 'b'
| 'c'
| 'd'
| 'e'
| 'f'
| 'g'
| 'h'
| 'i'
| 'j'
| 'k'
| 'l'
| 'm'
| 'n'
| 'o'
| 'p'
| 'q'
| 'r'
| 's'
| 't'
| 'u'
| 'v'
| 'w'
| 'x'
| 'y'
| 'z'
type Alphabet = LowerAlphabet | Uppercase<LowerAlphabet>
type Digit = '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0'
export type Letter = Alphabet | Digit | '_'
export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max'
}

View File

@ -0,0 +1,420 @@
import { GenericTable } from '../types'
import { ContainsNull, GenericRelationship, PostgreSQLTypes } from './types'
import { Ast, ParseQuery } from './parser'
import {
AggregateFunctions,
ExtractFirstProperty,
GenericSchema,
IsNonEmptyArray,
Prettify,
TablesAndViews,
TypeScriptTypes,
} from './types'
import {
CheckDuplicateEmbededReference,
GetFieldNodeResultName,
IsAny,
IsRelationNullable,
IsStringUnion,
JsonPathToType,
ResolveRelationship,
SelectQueryError,
} from './utils'
/**
* Main entry point for constructing the result type of a PostgREST query.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Query - The select query string literal to parse.
*/
export type GetResult<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName,
Relationships,
Query extends string
> = IsAny<Schema> extends true
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? ProcessNodesWithoutSchema<ParsedQuery>
: any
: ParsedQuery
: any
: Relationships extends null // For .rpc calls the passed relationships will be null; in that case, the result will always be the function return type
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RPCCallNodes<ParsedQuery, RelationName extends string ? RelationName : 'rpc_call', Row>
: ParsedQuery
: Row
: ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? Relationships extends GenericRelationship[]
? ProcessNodes<Schema, Row, RelationName, Relationships, ParsedQuery>
: SelectQueryError<'Invalid Relationships cannot infer result type'>
: SelectQueryError<'Invalid RelationName cannot infer result type'>
: ParsedQuery
: never
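// Type-level sketch (illustrative, hypothetical schema types): given a
// `tickets` table whose Row is `{ id: number; seat: string }`,
//
//   type Result = GetResult<Db, TicketsRow, 'tickets', TicketsRels, 'id, seat'>
//   // => { id: number; seat: string }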
type ProcessSimpleFieldWithoutSchema<Field extends Ast.FieldNode> =
Field['aggregateFunction'] extends AggregateFunctions
? {
// An aggregate function will always override the column name: id.sum() will become sum,
// unless it has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes // We apply the detected casted as the result type
? TypeScriptTypes<Field['castType']>
: any
}
type ProcessFieldNodeWithoutSchema<Node extends Ast.FieldNode> = IsNonEmptyArray<
Node['children']
> extends true
? {
[K in GetFieldNodeResultName<Node>]: Node['children'] extends Ast.Node[]
? ProcessNodesWithoutSchema<Node['children']>[]
: ProcessSimpleFieldWithoutSchema<Node>
}
: ProcessSimpleFieldWithoutSchema<Node>
/**
* Processes a single Node without schema and returns the resulting TypeScript type.
*/
type ProcessNodeWithoutSchema<Node extends Ast.Node> = Node extends Ast.StarNode
? any
: Node extends Ast.SpreadNode
? Node['target']['children'] extends Ast.StarNode[]
? any
: Node['target']['children'] extends Ast.FieldNode[]
? {
[P in Node['target']['children'][number] as GetFieldNodeResultName<P>]: P['castType'] extends PostgreSQLTypes
? TypeScriptTypes<P['castType']>
: any
}
: any
: Node extends Ast.FieldNode
? ProcessFieldNodeWithoutSchema<Node>
: any
/**
* Processes nodes when Schema is any, providing basic type inference
*/
type ProcessNodesWithoutSchema<
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {}
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNodeWithoutSchema<FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodesWithoutSchema<RestNodes, Acc & FieldResult>
: FieldResult
: any
: any
: any
: Prettify<Acc>
/**
* Processes a single Node from a select chained after a rpc call
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current rpc function
* @param NodeType - The Node to process.
*/
export type ProcessRPCNode<
Row extends Record<string, unknown>,
RelationName extends string,
NodeType extends Ast.Node
> = NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessSimpleField<Row, RelationName, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'RPC Unsupported node type.'>
/**
* Process select call that can be chained after an rpc call
*/
export type RPCCallNodes<
Nodes extends Ast.Node[],
RelationName extends string,
Row extends Record<string, unknown>,
Acc extends Record<string, unknown> = {} // Acc is now an object
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessRPCNode<Row, RelationName, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? RPCCallNodes<RestNodes, RelationName, Row, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array in RPC call'>
: SelectQueryError<'Invalid first node in RPC call'>
: Prettify<Acc>
/**
* Recursively processes an array of Nodes and accumulates the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Nodes - An array of AST nodes to process.
* @param Acc - Accumulator for the constructed type.
*/
export type ProcessNodes<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {} // Acc is now an object
> = CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes> extends false
? Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNode<Schema, Row, RelationName, Relationships, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodes<Schema, Row, RelationName, Relationships, RestNodes, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array type in ProcessNodes'>
: SelectQueryError<'Invalid first node type in ProcessNodes'>
: Prettify<Acc>
: Prettify<CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes>>
/**
* Processes a single Node and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param NodeType - The Node to process.
*/
export type ProcessNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
NodeType extends Ast.Node
> =
// TODO: figure out why comparing the `type` property is necessary vs. `NodeType extends Ast.StarNode`
NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.SpreadNode['type'] // If the selection is a ...spread
? ProcessSpreadNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.SpreadNode>>
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessFieldNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'Unsupported node type.'>
/**
* Processes a FieldNode and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
type ProcessFieldNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field['children'] extends []
? {}
: IsNonEmptyArray<Field['children']> extends true // Has embedded resource?
? ProcessEmbeddedResource<Schema, Relationships, Field, RelationName>
: ProcessSimpleField<Row, RelationName, Field>
type ResolveJsonPathType<
Value,
Path extends string | undefined,
CastType extends PostgreSQLTypes
> = Path extends string
? JsonPathToType<Value, Path> extends never
    ? // Always fall back if JsonPathToType returns never
TypeScriptTypes<CastType>
: JsonPathToType<Value, Path> extends infer PathResult
? PathResult extends string
? // Use the result if it's a string as we know that even with the string accessor ->> it's a valid type
PathResult
: IsStringUnion<PathResult> extends true
? // Use the result if it's a union of strings
PathResult
: CastType extends 'json'
? // If the type is not a string, ensure it was accessed with json accessor ->
PathResult
        : // Otherwise a non-string value was accessed with the string accessor ->>; use the TypeScriptTypes result
TypeScriptTypes<CastType>
: TypeScriptTypes<CastType>
: // No json path, use regular type casting
TypeScriptTypes<CastType>
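// Illustrative example (assumption, not part of the library): resolving a JSON
// path on a column typed as { settings: { theme: string } } keeps the known
// string type, while a non-string value accessed with ->> falls back to the cast type.
type ExampleJsonPathResolved = ResolveJsonPathType<
  { settings: { theme: string } },
  'settings.theme',
  'text'
> // => string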
/**
* Processes a simple field (without embedded resources).
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Field - The FieldNode to process.
*/
type ProcessSimpleField<
Row extends Record<string, unknown>,
RelationName extends string,
Field extends Ast.FieldNode
> = Field['name'] extends keyof Row | 'count'
? Field['aggregateFunction'] extends AggregateFunctions
? {
        // An aggregate function will always override the column name: id.sum() will become sum,
        // unless it has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? ResolveJsonPathType<Row[Field['name']], Field['jsonPath'], Field['castType']>
: Row[Field['name']]
}
: SelectQueryError<`column '${Field['name']}' does not exist on '${RelationName}'.`>
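// Illustrative behaviour (assumption, not part of the library): on a Row of
// { id: number }, selecting `id` yields { id: number }, `identifier:id` yields
// { identifier: number }, and `id.sum()` yields { sum: number } since the
// aggregate overrides the column name.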
/**
 * Processes an embedded resource (relation).
 *
 * @param Schema - Database schema.
 * @param Relationships - Relationships of the current table.
 * @param Field - The FieldNode to process.
 * @param CurrentTableOrView - The name of the current table or view.
 */
export type ProcessEmbeddedResource<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveRelationship<Schema, Relationships, Field, CurrentTableOrView> extends infer Resolved
? Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
}
? ProcessEmbeddedResourceResult<Schema, Resolved, Field, CurrentTableOrView>
    : // Otherwise Resolved is a SelectQueryError; return it keyed by the field name
{ [K in GetFieldNodeResultName<Field>]: Resolved }
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to resolve relationship.'> &
string
}
/**
* Helper type to process the result of an embedded resource.
*/
type ProcessEmbeddedResourceResult<
Schema extends GenericSchema,
Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
},
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema>
> = ProcessNodes<
Schema,
Resolved['referencedTable']['Row'],
Field['name'],
Resolved['referencedTable']['Relationships'],
Field['children'] extends undefined
? []
: Exclude<Field['children'], undefined> extends Ast.Node[]
? Exclude<Field['children'], undefined>
: []
> extends infer ProcessedChildren
? {
[K in GetFieldNodeResultName<Field>]: Resolved['direction'] extends 'forward'
? Field extends { innerJoin: true }
? Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren
: ProcessedChildren[]
: Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren | null
: ProcessedChildren[]
        : // If the relation is a self-reference it'll always be considered a reverse relationship
Resolved['relation']['referencedRelation'] extends CurrentTableOrView
          ? // It can either be a reverse reference via a column inclusion (e.g. parent_id(*));
            // in that case the result will be a single object
Resolved['relation']['match'] extends 'col'
? IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? ProcessedChildren | null
: ProcessedChildren
            : // Or it can be a reference via the referenced relation (e.g. collections(*));
              // in that case, the result will be an array of all the values (all collections whose parent_id is the current id)
ProcessedChildren[]
          : // Otherwise, if it's a non-self-referencing reverse relationship, the result is a single object
IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? ProcessedChildren | null
: ProcessedChildren
}
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to process embedded resource nodes.'> &
string
}
/**
* Processes a SpreadNode by processing its target node.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Spread - The SpreadNode to process.
*/
type ProcessSpreadNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Spread extends Ast.SpreadNode
> = ProcessNode<Schema, Row, RelationName, Relationships, Spread['target']> extends infer Result
? Result extends SelectQueryError<infer E>
? SelectQueryError<E>
: ExtractFirstProperty<Result> extends unknown[]
? {
          [K in Spread['target']['name']]: SelectQueryError<`"${RelationName}" and "${Spread['target']['name']}" do not form a many-to-one or one-to-one relationship; spread not possible`>
}
: ProcessSpreadNodeResult<Result>
: never
/**
* Helper type to process the result of a spread node.
*/
type ProcessSpreadNodeResult<Result> = Result extends Record<
string,
SelectQueryError<string> | null
>
? Result
: ExtractFirstProperty<Result> extends infer SpreadedObject
? ContainsNull<SpreadedObject> extends true
? Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] | null }, null>
: Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] }, null>
: SelectQueryError<'An error occurred spreading the object'>

View File

@ -0,0 +1,115 @@
import type { GenericRelationship, GenericSchema, GenericTable, Prettify } from '../types'
export type { GenericRelationship, GenericSchema, GenericTable, Prettify }
export type AggregateWithoutColumnFunctions = 'count'
export type AggregateWithColumnFunctions =
| 'sum'
| 'avg'
| 'min'
| 'max'
| AggregateWithoutColumnFunctions
export type AggregateFunctions = AggregateWithColumnFunctions
export type Json =
| string
| number
| boolean
| null
| {
[key: string]: Json | undefined
}
| Json[]
type PostgresSQLNumberTypes = 'int2' | 'int4' | 'int8' | 'float4' | 'float8' | 'numeric'
type PostgresSQLStringTypes =
| 'bytea'
| 'bpchar'
| 'varchar'
| 'date'
| 'text'
| 'citext'
| 'time'
| 'timetz'
| 'timestamp'
| 'timestamptz'
| 'uuid'
| 'vector'
type SingleValuePostgreSQLTypes =
| PostgresSQLNumberTypes
| PostgresSQLStringTypes
| 'bool'
| 'json'
| 'jsonb'
| 'void'
| 'record'
| string
type ArrayPostgreSQLTypes = `_${SingleValuePostgreSQLTypes}`
type TypeScriptSingleValueTypes<T extends SingleValuePostgreSQLTypes> = T extends 'bool'
? boolean
: T extends PostgresSQLNumberTypes
? number
: T extends PostgresSQLStringTypes
? string
: T extends 'json' | 'jsonb'
? Json
: T extends 'void'
? undefined
: T extends 'record'
? Record<string, unknown>
: unknown
type StripUnderscore<T extends string> = T extends `_${infer U}` ? U : T
// Represents all possible PostgreSQL types, including array types; the 'string' union member allows for custom types
export type PostgreSQLTypes = SingleValuePostgreSQLTypes | ArrayPostgreSQLTypes
// Helper type to convert PostgreSQL types to their TypeScript equivalents
export type TypeScriptTypes<T extends PostgreSQLTypes> = T extends ArrayPostgreSQLTypes
? TypeScriptSingleValueTypes<StripUnderscore<Extract<T, SingleValuePostgreSQLTypes>>>[]
: TypeScriptSingleValueTypes<T>
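// Illustrative examples (assumption, not part of the library): PostgreSQL
// prefixes array types with an underscore, so '_int4' maps to number[].
type ExampleScalar = TypeScriptTypes<'int4'> // => number
type ExampleArray = TypeScriptTypes<'_int4'> // => number[]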
// Utility types for working with unions
export type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (
k: infer I
) => void
? I
: never
export type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R
? R
: never
export type Push<T extends any[], V> = [...T, V]
// Converts a union type to a tuple type
export type UnionToTuple<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = N extends true
? []
: Push<UnionToTuple<Exclude<T, L>>, L>
export type UnionToArray<T> = UnionToTuple<T>
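// Illustrative example (assumption, not part of the library): converts a union
// into a tuple; member order depends on the compiler's internal representation.
type ExampleTuple = UnionToTuple<'a' | 'b'> // => ['a', 'b'] (order not guaranteed)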
// Extracts the type of the first property in an object type
export type ExtractFirstProperty<T> = T extends { [K in keyof T]: infer U } ? U : never
// Type predicates
export type ContainsNull<T> = null extends T ? true : false
export type IsNonEmptyArray<T> = Exclude<T, undefined> extends readonly [unknown, ...unknown[]]
? true
: false
// Types for working with database schemas
export type TablesAndViews<Schema extends GenericSchema> = Schema['Tables'] &
Exclude<Schema['Views'], ''>
export type GetTableRelationships<
Schema extends GenericSchema,
Tname extends string
> = TablesAndViews<Schema>[Tname] extends { Relationships: infer R } ? R : false

View File

@ -0,0 +1,580 @@
import { Ast } from './parser'
import {
AggregateFunctions,
ContainsNull,
GenericRelationship,
GenericSchema,
GenericTable,
IsNonEmptyArray,
TablesAndViews,
UnionToArray,
} from './types'
export type IsAny<T> = 0 extends 1 & T ? true : false
export type SelectQueryError<Message extends string> = { error: true } & Message
/*
** Because of pg-meta types generation there are some cases where the same relationship can be
** duplicated if the relation spans schemas and views; this ensures that we dedupe those
** relations and treat them as PostgREST would.
** This is no longer the case and has been patched here: https://github.com/supabase/postgres-meta/pull/809
** But we still need this for retro-compatibility with older generated types
** TODO: Remove this in next major version
*/
export type DeduplicateRelationships<T extends readonly unknown[]> = T extends readonly [
infer First,
...infer Rest
]
? First extends Rest[number]
? DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>
: [First, ...DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>]
: T
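// Illustrative example (assumption, not part of the library): duplicates are
// dropped from the front, keeping the last occurrence of each member.
type ExampleDeduped = DeduplicateRelationships<['a', 'b', 'a', 'c']> // => ['b', 'a', 'c']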
export type GetFieldNodeResultName<Field extends Ast.FieldNode> = Field['alias'] extends string
? Field['alias']
: Field['aggregateFunction'] extends AggregateFunctions
? Field['aggregateFunction']
: Field['name']
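// Illustrative behaviour (assumption, not part of the library): an alias wins
// over everything (`total:count()` -> 'total'), then the aggregate name
// (`count()` -> 'count'), and finally the column name itself (`id` -> 'id').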
type FilterRelationNodes<Nodes extends Ast.Node[]> = UnionToArray<
{
[K in keyof Nodes]: Nodes[K] extends Ast.SpreadNode
? Nodes[K]['target']
: Nodes[K] extends Ast.FieldNode
? IsNonEmptyArray<Nodes[K]['children']> extends true
? Nodes[K]
: never
: never
}[number]
>
type ResolveRelationships<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.FieldNode[]
> = UnionToArray<{
[K in keyof Nodes]: Nodes[K] extends Ast.FieldNode
? ResolveRelationship<Schema, Relationships, Nodes[K], RelationName> extends infer Relation
? Relation extends {
relation: {
referencedRelation: string
foreignKeyName: string
match: string
}
from: string
}
? {
referencedTable: Relation['relation']['referencedRelation']
fkName: Relation['relation']['foreignKeyName']
from: Relation['from']
match: Relation['relation']['match']
fieldName: GetFieldNodeResultName<Nodes[K]>
}
: Relation
: never
: never
}>[0]
/**
* Checks if a relation is implicitly referenced twice, requiring disambiguation
*/
type IsDoubleReference<T, U> = T extends {
referencedTable: infer RT
fieldName: infer FN
match: infer M
}
? M extends 'col' | 'refrel'
? U extends { referencedTable: RT; fieldName: FN; match: M }
? true
: false
: false
: false
/**
* Compares one element with all other elements in the array to find duplicates
*/
type CheckDuplicates<Arr extends any[], Current> = Arr extends [infer Head, ...infer Tail]
? IsDoubleReference<Current, Head> extends true
? Head | CheckDuplicates<Tail, Current> // Return the Head if duplicate
: CheckDuplicates<Tail, Current> // Otherwise, continue checking
: never
/**
* Iterates over the elements of the array to find duplicates
*/
type FindDuplicatesWithinDeduplicated<Arr extends any[]> = Arr extends [infer Head, ...infer Tail]
? CheckDuplicates<Tail, Head> | FindDuplicatesWithinDeduplicated<Tail>
: never
type FindDuplicates<Arr extends any[]> = FindDuplicatesWithinDeduplicated<
DeduplicateRelationships<Arr>
>
export type CheckDuplicateEmbededReference<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[]
> = FilterRelationNodes<Nodes> extends infer RelationsNodes
? RelationsNodes extends Ast.FieldNode[]
? ResolveRelationships<
Schema,
RelationName,
Relationships,
RelationsNodes
> extends infer ResolvedRels
? ResolvedRels extends unknown[]
? FindDuplicates<ResolvedRels> extends infer Duplicates
? Duplicates extends never
? false
: Duplicates extends { fieldName: infer FieldName }
? FieldName extends string
? {
              [K in FieldName]: SelectQueryError<`table "${RelationName}" specified more than once; use hinting for disambiguation`>
}
: false
: false
: false
: false
: false
: false
: false
/**
* Returns a boolean representing whether there is a foreign key referencing
* a given relation.
*/
type HasFKeyToFRel<FRelName, Relationships> = Relationships extends [infer R]
? R extends { referencedRelation: FRelName }
? true
: false
: Relationships extends [infer R, ...infer Rest]
? HasFKeyToFRel<FRelName, [R]> extends true
? true
: HasFKeyToFRel<FRelName, Rest>
: false
/**
* Checks if there is more than one relation to a given foreign relation name in the Relationships.
*/
type HasMultipleFKeysToFRelDeduplicated<FRelName, Relationships> = Relationships extends [
infer R,
...infer Rest
]
? R extends { referencedRelation: FRelName }
? HasFKeyToFRel<FRelName, Rest> extends true
? true
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: false
type HasMultipleFKeysToFRel<
FRelName,
Relationships extends unknown[]
> = HasMultipleFKeysToFRelDeduplicated<FRelName, DeduplicateRelationships<Relationships>>
type CheckRelationshipError<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FoundRelation
> = FoundRelation extends SelectQueryError<string>
? FoundRelation
: // If the relation is a reverse relation with no hint (matching by name)
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'reverse'
}
? RelatedRelationName extends string
    ? // We check whether other relations to this table could cause ambiguity
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
      ? // If so, PostgREST will fail at runtime and require disambiguation via hinting
SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: FoundRelation
: never
: // Same check for forward relationships, but we must gather the relationships from the found relation
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'forward'
from: infer From
}
? RelatedRelationName extends string
? From extends keyof TablesAndViews<Schema> & string
? HasMultipleFKeysToFRel<
RelatedRelationName,
TablesAndViews<Schema>[From]['Relationships']
> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${From}' and '${RelatedRelationName}' you need to hint the column with ${From}!<columnName> ?`>
: FoundRelation
: never
: never
: FoundRelation
/**
* Resolves relationships for embedded resources and retrieves the referenced Table
*/
export type ResolveRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveReverseRelationship<
Schema,
Relationships,
Field,
CurrentTableOrView
> extends infer ReverseRelationship
? ReverseRelationship extends false
? CheckRelationshipError<
Schema,
Relationships,
CurrentTableOrView,
ResolveForwardRelationship<Schema, Field, CurrentTableOrView>
>
: CheckRelationshipError<Schema, Relationships, CurrentTableOrView, ReverseRelationship>
: never
/**
* Resolves reverse relationships (from children to parent)
*/
type ResolveReverseRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<Schema, Relationships, Field> extends infer FoundRelation
? FoundRelation extends never
? false
: FoundRelation extends { referencedRelation: infer RelatedRelationName }
? RelatedRelationName extends string
? RelatedRelationName extends keyof TablesAndViews<Schema>
? // If the relation was found via hinting we just return it without any more checks
FoundRelation extends { hint: string }
? {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
          : // If the relation was found via implicit relation naming, we must ensure there are no conflicting matches
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
: SelectQueryError<`Relation '${RelatedRelationName}' not found in schema.`>
: false
: false
: false
export type FindMatchingTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Tables']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingTableRelationships<Schema, Rest, value>
: FindMatchingTableRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Views']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingViewRelationships<Schema, Rest, value>
: FindMatchingViewRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingHintTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: false
: false
: false
export type FindMatchingHintViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: false
: false
: false
type IsColumnsNullable<
Table extends Pick<GenericTable, 'Row'>,
Columns extends (keyof Table['Row'])[]
> = Columns extends [infer Column, ...infer Rest]
? Column extends keyof Table['Row']
? ContainsNull<Table['Row'][Column]> extends true
? true
: IsColumnsNullable<Table, Rest extends (keyof Table['Row'])[] ? Rest : []>
: false
: false
// Check whether a 1-1 relation is nullable by checking the types of its columns
export type IsRelationNullable<
Table extends GenericTable,
Relation extends GenericRelationship
> = IsColumnsNullable<Table, Relation['columns']>
type TableForwardRelationships<
Schema extends GenericSchema,
TName
> = TName extends keyof TablesAndViews<Schema>
? UnionToArray<
RecursivelyFindRelationships<Schema, TName, keyof TablesAndViews<Schema>>
> extends infer R
? R extends (GenericRelationship & { from: keyof TablesAndViews<Schema> })[]
? R
: []
: []
: []
type RecursivelyFindRelationships<
Schema extends GenericSchema,
TName,
Keys extends keyof TablesAndViews<Schema>
> = Keys extends infer K
? K extends keyof TablesAndViews<Schema>
? FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K> extends never
? RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
:
| FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K>
| RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
: false
: false
type FilterRelationships<R, TName, From> = R extends readonly (infer Rel)[]
? Rel extends { referencedRelation: TName }
? Rel & { from: From }
: never
: never
export type ResolveForwardRelationship<
Schema extends GenericSchema,
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<
Schema,
TablesAndViews<Schema>[Field['name']]['Relationships'],
Ast.FieldNode & { name: CurrentTableOrView; hint: Field['hint'] }
> extends infer FoundByName
? FoundByName extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[Field['name']]
relation: FoundByName
direction: 'forward'
from: Field['name']
type: 'found-by-name'
}
: FindFieldMatchingRelationships<
Schema,
TableForwardRelationships<Schema, CurrentTableOrView>,
Field
> extends infer FoundByMatch
? FoundByMatch extends GenericRelationship & {
from: keyof TablesAndViews<Schema>
}
? {
referencedTable: TablesAndViews<Schema>[FoundByMatch['from']]
relation: FoundByMatch
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-match'
}
: FindJoinTableRelationship<
Schema,
CurrentTableOrView,
Field['name']
> extends infer FoundByJoinTable
? FoundByJoinTable extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[FoundByJoinTable['referencedRelation']]
relation: FoundByJoinTable & { match: 'refrel' }
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-join-table'
}
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
/**
 * Given a CurrentTableOrView, finds a join table linking it to the requested relation.
 * For example, if products and categories are linked via a product_categories table:
*
* @example
* Given:
 * - CurrentTableOrView = 'products'
* - FieldName = "categories"
*
* It should return this relationship from product_categories:
* {
* foreignKeyName: "product_categories_category_id_fkey",
* columns: ["category_id"],
* isOneToOne: false,
* referencedRelation: "categories",
* referencedColumns: ["id"]
* }
*/
type ResolveJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = {
[TableName in keyof TablesAndViews<Schema>]: DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer Rel)[]
? Rel extends { referencedRelation: CurrentTableOrView }
? DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer OtherRel)[]
? OtherRel extends { referencedRelation: FieldName }
? OtherRel
: never
: never
: never
: never
}[keyof TablesAndViews<Schema>]
export type FindJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = ResolveJoinTableRelationship<Schema, CurrentTableOrView, FieldName> extends infer Result
? [Result] extends [never]
? false
: Result
: never
/**
* Finds a matching relationship based on the FieldNode's name and optional hint.
*/
export type FindFieldMatchingRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field extends { hint: string }
? FindMatchingHintTableRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintTableRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-table-via-hint'
hint: Field['hint']
}
: FindMatchingHintViewRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintViewRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-view-via-hint'
hint: Field['hint']
}
: SelectQueryError<'Failed to find matching relation via hint'>
: FindMatchingTableRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingTableRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-table-via-name'
name: Field['name']
}
: FindMatchingViewRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingViewRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-view-via-name'
name: Field['name']
}
: SelectQueryError<'Failed to find matching relation via name'>
export type JsonPathToAccessor<Path extends string> = Path extends `${infer P1}->${infer P2}`
? P2 extends `>${infer Rest}` // Handle ->> operator
? JsonPathToAccessor<`${P1}.${Rest}`>
: P2 extends string // Handle -> operator
? JsonPathToAccessor<`${P1}.${P2}`>
: Path
: Path extends `>${infer Rest}` // Clean up any remaining > characters
? JsonPathToAccessor<Rest>
: Path extends `${infer P1}::${infer _}` // Handle type casting
? JsonPathToAccessor<P1>
: Path extends `${infer P1}${')' | ','}${infer _}` // Handle closing parenthesis and comma
? P1
: Path
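// Illustrative example (assumption, not part of the library): both the -> and
// ->> operators are rewritten into a dotted property path.
type ExampleAccessor = JsonPathToAccessor<'address->city->>name'> // => 'address.city.name'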
export type JsonPathToType<T, Path extends string> = Path extends ''
? T
: ContainsNull<T> extends true
? JsonPathToType<Exclude<T, null>, Path>
: Path extends `${infer Key}.${infer Rest}`
? Key extends keyof T
? JsonPathToType<T[Key], Rest>
: never
: Path extends keyof T
? T[Path]
: never
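// Illustrative example (assumption, not part of the library): walks the dotted
// path produced by JsonPathToAccessor through the column's TypeScript type.
type ExampleJsonType = JsonPathToType<
  { address: { city: { name: string } } },
  'address.city.name'
> // => string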
export type IsStringUnion<T> = string extends T
? false
: T extends string
? [T] extends [never]
? false
: true
: false

188
node_modules/@supabase/postgrest-js/src/types.ts generated vendored Normal file
View File

@ -0,0 +1,188 @@
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
import { SelectQueryError } from './select-query-parser/utils'
export type Fetch = typeof fetch
/**
* Response format
*
* {@link https://github.com/supabase/supabase-js/issues/32}
*/
interface PostgrestResponseBase {
status: number
statusText: string
}
export interface PostgrestResponseSuccess<T> extends PostgrestResponseBase {
error: null
data: T
count: number | null
}
export interface PostgrestResponseFailure extends PostgrestResponseBase {
error: PostgrestError
data: null
count: null
}
// TODO: in v3:
// - remove PostgrestResponse and PostgrestMaybeSingleResponse
// - rename PostgrestSingleResponse to PostgrestResponse
export type PostgrestSingleResponse<T> = PostgrestResponseSuccess<T> | PostgrestResponseFailure
export type PostgrestMaybeSingleResponse<T> = PostgrestSingleResponse<T | null>
export type PostgrestResponse<T> = PostgrestSingleResponse<T[]>
export type GenericRelationship = {
foreignKeyName: string
columns: string[]
isOneToOne?: boolean
referencedRelation: string
referencedColumns: string[]
}
export type GenericTable = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericUpdatableView = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericNonUpdatableView = {
Row: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericView = GenericUpdatableView | GenericNonUpdatableView
export type GenericFunction = {
Args: Record<string, unknown>
Returns: unknown
}
export type GenericSchema = {
Tables: Record<string, GenericTable>
Views: Record<string, GenericView>
Functions: Record<string, GenericFunction>
}
// https://twitter.com/mattpocockuk/status/1622730173446557697
export type Prettify<T> = { [K in keyof T]: T[K] } & {}
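// Illustrative example (assumption, not part of the library): flattens
// intersections for readability, e.g. Prettify<{ a: number } & { b: string }>
// displays as { a: number; b: string }.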
// https://github.com/sindresorhus/type-fest
export type SimplifyDeep<Type, ExcludeType = never> = ConditionalSimplifyDeep<
Type,
ExcludeType | NonRecursiveType | Set<unknown> | Map<unknown, unknown>,
object
>
type ConditionalSimplifyDeep<
Type,
ExcludeType = never,
IncludeType = unknown
> = Type extends ExcludeType
? Type
: Type extends IncludeType
? { [TypeKey in keyof Type]: ConditionalSimplifyDeep<Type[TypeKey], ExcludeType, IncludeType> }
: Type
type NonRecursiveType = BuiltIns | Function | (new (...arguments_: any[]) => unknown)
type BuiltIns = Primitive | void | Date | RegExp
type Primitive = null | undefined | string | number | boolean | symbol | bigint
export type IsValidResultOverride<Result, NewResult, ErrorResult, ErrorNewResult> =
Result extends any[]
? NewResult extends any[]
? // Both are arrays - valid
true
: ErrorResult
: NewResult extends any[]
? ErrorNewResult
: // Neither are arrays - valid
true
/**
* Utility type to check if array types match between Result and NewResult.
* Returns either the valid NewResult type or an error message type.
*/
export type CheckMatchingArrayTypes<Result, NewResult> =
// If the result is a QueryError we allow the user to override anyway
Result extends SelectQueryError<string>
? NewResult
: IsValidResultOverride<
Result,
NewResult,
{
Error: 'Type mismatch: Cannot cast array result to a single object. Use .returns<Array<YourType>> for array results or .single() to convert the result to a single object'
},
{
Error: 'Type mismatch: Cannot cast single object to array type. Remove Array wrapper from return type or make sure you are not using .single() up in the calling chain'
}
> extends infer ValidationResult
? ValidationResult extends true
    ? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? NewResult | null
: NewResult
: // contains the error
ValidationResult
: never
type Simplify<T> = T extends object ? { [K in keyof T]: T[K] } : T
// Extract only explicit (non-index-signature) keys.
type ExplicitKeys<T> = {
[K in keyof T]: string extends K ? never : K
}[keyof T]
type MergeExplicit<New, Row> = {
  // We merge all the explicit keys, which allows merging and overriding of types like
// { [key: string]: unknown } and { someSpecificKey: boolean }
[K in ExplicitKeys<New> | ExplicitKeys<Row>]: K extends keyof New
? K extends keyof Row
? Row[K] extends SelectQueryError<string>
? New[K]
      : // Check if the override is on an embedded relation (array)
New[K] extends any[]
? Row[K] extends any[]
? Array<Simplify<MergeDeep<NonNullable<New[K][number]>, NonNullable<Row[K][number]>>>>
: New[K]
: // Check if both properties are objects omitting a potential null union
IsPlainObject<NonNullable<New[K]>> extends true
? IsPlainObject<NonNullable<Row[K]>> extends true
? // If they are, use the new override as source of truth for the optionality
ContainsNull<New[K]> extends true
? // If the override wants to preserve optionality
Simplify<MergeDeep<NonNullable<New[K]>, NonNullable<Row[K]>>> | null
: // If the override wants to enforce non-null result
Simplify<MergeDeep<New[K], NonNullable<Row[K]>>>
: New[K] // Override with New type if Row isn't an object
: New[K] // Override primitives with New type
: New[K] // Add new properties from New
: K extends keyof Row
? Row[K] // Keep existing properties not in New
: never
}
type MergeDeep<New, Row> = Simplify<
MergeExplicit<New, Row> &
// Intersection here is to restore dynamic keys into the merging result
// eg:
// {[key: number]: string}
// or Record<string, number | null>
(string extends keyof Row ? { [K: string]: Row[string] } : {})
>
// Helper to check if a type is a plain object (not an array)
type IsPlainObject<T> = T extends any[] ? false : T extends object ? true : false
// Merge the new result with the original (Result) when merge option is true.
// If NewResult is an array, merge each element.
export type MergePartialResult<NewResult, Result, Options> = Options extends { merge: true }
? Result extends any[]
? NewResult extends any[]
? Array<Simplify<MergeDeep<NewResult[number], Result[number]>>>
: never
: Simplify<MergeDeep<NewResult, Result>>
: NewResult

1
node_modules/@supabase/postgrest-js/src/version.ts generated vendored Normal file
View File

@ -0,0 +1 @@
export const version = '1.19.2'

22
node_modules/@supabase/realtime-js/LICENSE.md generated vendored Normal file
View File

@ -0,0 +1,22 @@
# MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

221
node_modules/@supabase/realtime-js/README.md generated vendored Normal file
View File

@ -0,0 +1,221 @@
<br />
<p align="center">
<a href="https://supabase.io">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/supabase/supabase/master/packages/common/assets/images/supabase-logo-wordmark--dark.svg">
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/supabase/supabase/master/packages/common/assets/images/supabase-logo-wordmark--light.svg">
<img alt="Supabase Logo" width="300" src="https://raw.githubusercontent.com/supabase/supabase/master/packages/common/assets/images/logo-preview.jpg">
</picture>
</a>
<h1 align="center">Supabase Realtime Client</h1>
<h3 align="center">Send ephemeral messages with <b>Broadcast</b>, track and synchronize state with <b>Presence</b>, and listen to database changes with <b>Postgres Change Data Capture (CDC)</b>.</h3>
<p align="center">
<a href="https://supabase.com/docs/guides/realtime">Guides</a>
·
<a href="https://supabase.com/docs/reference/javascript">Reference Docs</a>
·
<a href="https://multiplayer.dev">Multiplayer Demo</a>
</p>
</p>
# Overview
This client enables you to use the following Supabase Realtime features:
- **Broadcast**: send ephemeral messages between clients with minimal latency. Use cases include sharing cursor positions between users.
- **Presence**: track and synchronize shared state across clients with the help of CRDTs. Use cases include tracking which users are currently viewing a specific webpage.
- **Postgres Change Data Capture (CDC)**: listen for changes in your PostgreSQL database and send them to clients.
# Usage
## Installing the Package
```bash
npm install @supabase/realtime-js
```
## Creating a Channel
```js
import { RealtimeClient } from '@supabase/realtime-js'
const client = new RealtimeClient(REALTIME_URL, {
params: {
apikey: API_KEY
},
})
const channel = client.channel('test-channel', {})
channel.subscribe((status, err) => {
if (status === 'SUBSCRIBED') {
console.log('Connected!')
}
if (status === 'CHANNEL_ERROR') {
console.log(`There was an error subscribing to channel: ${err.message}`)
}
if (status === 'TIMED_OUT') {
console.log('Realtime server did not respond in time.')
}
if (status === 'CLOSED') {
console.log('Realtime channel was unexpectedly closed.')
}
})
```
### Notes:
- `REALTIME_URL` is `'ws://localhost:4000/socket'` when developing locally and `'wss://<project_ref>.supabase.co/realtime/v1'` when connecting to your Supabase project.
- `API_KEY` is a JWT whose claims must contain `exp` and `role` (existing database role).
- Channel name can be any `string`.
## Broadcast
Your client can send and receive messages based on the `event`.
```js
// Setup...
const channel = client.channel('broadcast-test', { config: { broadcast: { ack: false, self: false } } })
channel.on('broadcast', { event: 'some-event' }, (payload) =>
console.log(payload)
)
channel.subscribe(async (status) => {
if (status === 'SUBSCRIBED') {
// Send message to other clients listening to 'broadcast-test' channel
await channel.send({
type: 'broadcast',
event: 'some-event',
payload: { hello: 'world' },
})
}
})
```
### Notes:
- Setting `ack` to `true` means that the `channel.send` promise will resolve once the server replies with an acknowledgement that it received the broadcast message request.
- Setting `self` to `true` means that the client will receive the broadcast message it sent out.
- Setting `private` to `true` means that the client will use RLS to determine whether the user can connect to a given channel, as sketched below.
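A minimal sketch combining these options (illustrative; the channel and event names are made up): a private channel whose `send` promise only resolves after the server acknowledges the broadcast.

```js
// Sketch: private channel with acknowledgements enabled
const ackedChannel = client.channel('private-broadcast-test', {
  config: { broadcast: { ack: true }, private: true },
})
ackedChannel.subscribe(async (status) => {
  if (status === 'SUBSCRIBED') {
    // With ack: true, this promise resolves once the server confirms receipt
    const response = await ackedChannel.send({
      type: 'broadcast',
      event: 'acked-event',
      payload: { hello: 'world' },
    })
    console.log(response) // 'ok' | 'timed out' | 'error'
  }
})
```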
## Presence
Your client can track and sync state that's stored in the channel.
```js
// Setup...
const channel = client.channel(
'presence-test',
{
config: {
presence: {
key: ''
}
}
}
)
channel.on('presence', { event: 'sync' }, () => {
console.log('Online users: ', channel.presenceState())
})
channel.on('presence', { event: 'join' }, ({ newPresences }) => {
console.log('New users have joined: ', newPresences)
})
channel.on('presence', { event: 'leave' }, ({ leftPresences }) => {
console.log('Users have left: ', leftPresences)
})
channel.subscribe(async (status) => {
if (status === 'SUBSCRIBED') {
    const presenceTrackStatus = await channel.track({ user_id: 1 })
    console.log(presenceTrackStatus)
}
})
```
## Postgres CDC
Receive database changes on the client.
```js
// Setup...
const channel = client.channel('db-changes')
channel.on('postgres_changes', { event: '*', schema: 'public' }, (payload) => {
console.log('All changes in public schema: ', payload)
})
channel.on('postgres_changes', { event: 'INSERT', schema: 'public', table: 'messages' }, (payload) => {
console.log('All inserts in messages table: ', payload)
})
channel.on('postgres_changes', { event: 'UPDATE', schema: 'public', table: 'users', filter: 'username=eq.Realtime' }, (payload) => {
console.log('All updates on users table when username is Realtime: ', payload)
})
channel.subscribe(async (status) => {
if (status === 'SUBSCRIBED') {
console.log('Ready to receive database changes!')
}
})
```
## Get All Channels
You can see all the channels that your client has instantiated.
```js
// Setup...
client.getChannels()
```
## Cleanup
It is highly recommended that you clean up your channels after you're done with them.
- Remove a single channel
```js
// Setup...
const channel = client.channel('some-channel-to-remove')
channel.subscribe()
client.removeChannel(channel)
```
- Remove all channels
```js
// Setup...
const channel1 = client.channel('a-channel-to-remove')
const channel2 = client.channel('another-channel-to-remove')
channel1.subscribe()
channel2.subscribe()
client.removeAllChannels()
```
## Credits
This repo draws heavily from [phoenix-js](https://github.com/phoenixframework/phoenix/tree/master/assets/js/phoenix).
## License
MIT.

66
node_modules/@supabase/realtime-js/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"name": "@supabase/realtime-js",
"version": "2.11.2",
"description": "Listen to realtime updates to your PostgreSQL database",
"keywords": [
"realtime",
"phoenix",
"elixir",
"javascript",
"typescript",
"firebase",
"supabase"
],
"homepage": "https://github.com/supabase/realtime-js",
"bugs": "https://github.com/supabase/realtime-js/issues",
"files": [
"dist",
"src"
],
"main": "dist/main/index.js",
"module": "dist/module/index.js",
"types": "dist/module/index.d.ts",
"repository": "https://github.com/supabase/realtime-js",
"author": "Supabase",
"license": "MIT",
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\"",
"build": "run-s clean format build:*",
"build:main": "tsc -p tsconfig.json",
"build:module": "tsc -p tsconfig.module.json",
"test": "vitest run",
"test:watch": "vitest",
"coverage": "vitest run --coverage",
"docs": "typedoc src/index.ts --out docs/v2",
"docs:json": "typedoc --json docs/v2/spec.json --excludeExternals src/index.ts",
"check-exports": "attw --pack .",
"ci": "run-s test coverage"
},
"dependencies": {
"@supabase/node-fetch": "^2.6.14",
"@types/phoenix": "^1.5.4",
"@types/ws": "^8.5.10",
"ws": "^8.18.0"
},
"devDependencies": {
"@arethetypeswrong/cli": "^0.16.2",
"@types/sinon": "^17.0.3",
"@vitest/coverage-v8": "^2.0.5",
"eslint": "^7.0.0",
"esm": "^3.2.25",
"jsdom": "^16.7.0",
"jsdom-global": "3.0.0",
"jsonwebtoken": "^9.0.2",
"mock-socket": "^9.3.1",
"npm-run-all": "^4.1.5",
"nyc": "^15.1.0",
"prettier": "^2.1.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"sinon": "^18.0.0",
"typedoc": "^0.22.16",
"typescript": "^4.0.3",
"vitest": "^2.0.5",
"web-worker": "1.2.0"
}
}

View File

@ -0,0 +1,818 @@
import { CHANNEL_EVENTS, CHANNEL_STATES } from './lib/constants'
import Push from './lib/push'
import type RealtimeClient from './RealtimeClient'
import Timer from './lib/timer'
import RealtimePresence, {
REALTIME_PRESENCE_LISTEN_EVENTS,
} from './RealtimePresence'
import type {
RealtimePresenceJoinPayload,
RealtimePresenceLeavePayload,
RealtimePresenceState,
} from './RealtimePresence'
import * as Transformers from './lib/transformers'
import { httpEndpointURL } from './lib/transformers'
export type RealtimeChannelOptions = {
config: {
/**
     * self option enables the client to receive the messages it broadcasts
     * ack option instructs the server to acknowledge that a broadcast message was received
*/
broadcast?: { self?: boolean; ack?: boolean }
/**
* key option is used to track presence payload across clients
*/
presence?: { key?: string }
/**
     * defines whether the channel is private and whether RLS policies will be used to check data
*/
private?: boolean
}
}
type RealtimePostgresChangesPayloadBase = {
schema: string
table: string
commit_timestamp: string
errors: string[]
}
export type RealtimePostgresInsertPayload<T extends { [key: string]: any }> =
RealtimePostgresChangesPayloadBase & {
eventType: `${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.INSERT}`
new: T
old: {}
}
export type RealtimePostgresUpdatePayload<T extends { [key: string]: any }> =
RealtimePostgresChangesPayloadBase & {
eventType: `${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.UPDATE}`
new: T
old: Partial<T>
}
export type RealtimePostgresDeletePayload<T extends { [key: string]: any }> =
RealtimePostgresChangesPayloadBase & {
eventType: `${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.DELETE}`
new: {}
old: Partial<T>
}
export type RealtimePostgresChangesPayload<T extends { [key: string]: any }> =
| RealtimePostgresInsertPayload<T>
| RealtimePostgresUpdatePayload<T>
| RealtimePostgresDeletePayload<T>
export type RealtimePostgresChangesFilter<
T extends `${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT}`
> = {
/**
* The type of database change to listen to.
*/
event: T
/**
* The database schema to listen to.
*/
schema: string
/**
* The database table to listen to.
*/
table?: string
/**
* Receive database changes when filter is matched.
*/
filter?: string
}
export type RealtimeChannelSendResponse = 'ok' | 'timed out' | 'error'
export enum REALTIME_POSTGRES_CHANGES_LISTEN_EVENT {
ALL = '*',
INSERT = 'INSERT',
UPDATE = 'UPDATE',
DELETE = 'DELETE',
}
export enum REALTIME_LISTEN_TYPES {
BROADCAST = 'broadcast',
PRESENCE = 'presence',
POSTGRES_CHANGES = 'postgres_changes',
SYSTEM = 'system',
}
export enum REALTIME_SUBSCRIBE_STATES {
SUBSCRIBED = 'SUBSCRIBED',
TIMED_OUT = 'TIMED_OUT',
CLOSED = 'CLOSED',
CHANNEL_ERROR = 'CHANNEL_ERROR',
}
export const REALTIME_CHANNEL_STATES = CHANNEL_STATES
interface PostgresChangesFilters {
postgres_changes: {
id: string
event: string
schema?: string
table?: string
filter?: string
}[]
}
/** A channel is the basic building block of Realtime
* and narrows the scope of data flow to subscribed clients.
* You can think of a channel as a chatroom where participants are able to see who's online
* and send and receive messages.
*/
export default class RealtimeChannel {
bindings: {
[key: string]: {
type: string
filter: { [key: string]: any }
callback: Function
id?: string
}[]
} = {}
timeout: number
state = CHANNEL_STATES.closed
joinedOnce = false
joinPush: Push
rejoinTimer: Timer
pushBuffer: Push[] = []
presence: RealtimePresence
broadcastEndpointURL: string
subTopic: string
private: boolean
constructor(
/** Topic name can be any string. */
public topic: string,
public params: RealtimeChannelOptions = { config: {} },
public socket: RealtimeClient
) {
this.subTopic = topic.replace(/^realtime:/i, '')
this.params.config = {
...{
broadcast: { ack: false, self: false },
presence: { key: '' },
private: false,
},
...params.config,
}
this.timeout = this.socket.timeout
this.joinPush = new Push(
this,
CHANNEL_EVENTS.join,
this.params,
this.timeout
)
this.rejoinTimer = new Timer(
() => this._rejoinUntilConnected(),
this.socket.reconnectAfterMs
)
this.joinPush.receive('ok', () => {
this.state = CHANNEL_STATES.joined
this.rejoinTimer.reset()
this.pushBuffer.forEach((pushEvent: Push) => pushEvent.send())
this.pushBuffer = []
})
this._onClose(() => {
this.rejoinTimer.reset()
this.socket.log('channel', `close ${this.topic} ${this._joinRef()}`)
this.state = CHANNEL_STATES.closed
this.socket._remove(this)
})
this._onError((reason: string) => {
if (this._isLeaving() || this._isClosed()) {
return
}
this.socket.log('channel', `error ${this.topic}`, reason)
this.state = CHANNEL_STATES.errored
this.rejoinTimer.scheduleTimeout()
})
this.joinPush.receive('timeout', () => {
if (!this._isJoining()) {
return
}
this.socket.log('channel', `timeout ${this.topic}`, this.joinPush.timeout)
this.state = CHANNEL_STATES.errored
this.rejoinTimer.scheduleTimeout()
})
this._on(CHANNEL_EVENTS.reply, {}, (payload: any, ref: string) => {
this._trigger(this._replyEventName(ref), payload)
})
this.presence = new RealtimePresence(this)
this.broadcastEndpointURL =
httpEndpointURL(this.socket.endPoint) + '/api/broadcast'
this.private = this.params.config.private || false
}
/** Subscribe registers your client with the server */
subscribe(
callback?: (status: REALTIME_SUBSCRIBE_STATES, err?: Error) => void,
timeout = this.timeout
): RealtimeChannel {
if (!this.socket.isConnected()) {
this.socket.connect()
}
if (this.joinedOnce) {
throw `tried to subscribe multiple times. 'subscribe' can only be called a single time per channel instance`
} else {
const {
config: { broadcast, presence, private: isPrivate },
} = this.params
this._onError((e: Error) =>
callback?.(REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR, e)
)
this._onClose(() => callback?.(REALTIME_SUBSCRIBE_STATES.CLOSED))
const accessTokenPayload: { access_token?: string } = {}
const config = {
broadcast,
presence,
postgres_changes:
this.bindings.postgres_changes?.map((r) => r.filter) ?? [],
private: isPrivate,
}
if (this.socket.accessTokenValue) {
accessTokenPayload.access_token = this.socket.accessTokenValue
}
this.updateJoinPayload({ ...{ config }, ...accessTokenPayload })
this.joinedOnce = true
this._rejoin(timeout)
this.joinPush
.receive('ok', async ({ postgres_changes }: PostgresChangesFilters) => {
this.socket.setAuth()
if (postgres_changes === undefined) {
callback?.(REALTIME_SUBSCRIBE_STATES.SUBSCRIBED)
return
} else {
const clientPostgresBindings = this.bindings.postgres_changes
const bindingsLen = clientPostgresBindings?.length ?? 0
const newPostgresBindings = []
for (let i = 0; i < bindingsLen; i++) {
const clientPostgresBinding = clientPostgresBindings[i]
const {
filter: { event, schema, table, filter },
} = clientPostgresBinding
const serverPostgresFilter =
postgres_changes && postgres_changes[i]
if (
serverPostgresFilter &&
serverPostgresFilter.event === event &&
serverPostgresFilter.schema === schema &&
serverPostgresFilter.table === table &&
serverPostgresFilter.filter === filter
) {
newPostgresBindings.push({
...clientPostgresBinding,
id: serverPostgresFilter.id,
})
} else {
this.unsubscribe()
callback?.(
REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR,
new Error(
'mismatch between server and client bindings for postgres changes'
)
)
return
}
}
this.bindings.postgres_changes = newPostgresBindings
          callback?.(REALTIME_SUBSCRIBE_STATES.SUBSCRIBED)
return
}
})
.receive('error', (error: { [key: string]: any }) => {
callback?.(
REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR,
new Error(
JSON.stringify(Object.values(error).join(', ') || 'error')
)
)
return
})
.receive('timeout', () => {
callback?.(REALTIME_SUBSCRIBE_STATES.TIMED_OUT)
return
})
}
return this
}
presenceState<
T extends { [key: string]: any } = {}
>(): RealtimePresenceState<T> {
return this.presence.state as RealtimePresenceState<T>
}
async track(
payload: { [key: string]: any },
opts: { [key: string]: any } = {}
): Promise<RealtimeChannelSendResponse> {
return await this.send(
{
type: 'presence',
event: 'track',
payload,
},
opts.timeout || this.timeout
)
}
async untrack(
opts: { [key: string]: any } = {}
): Promise<RealtimeChannelSendResponse> {
return await this.send(
{
type: 'presence',
event: 'untrack',
},
opts
)
}
/**
* Creates an event handler that listens to changes.
*/
on(
type: `${REALTIME_LISTEN_TYPES.PRESENCE}`,
filter: { event: `${REALTIME_PRESENCE_LISTEN_EVENTS.SYNC}` },
callback: () => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.PRESENCE}`,
filter: { event: `${REALTIME_PRESENCE_LISTEN_EVENTS.JOIN}` },
callback: (payload: RealtimePresenceJoinPayload<T>) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.PRESENCE}`,
filter: { event: `${REALTIME_PRESENCE_LISTEN_EVENTS.LEAVE}` },
callback: (payload: RealtimePresenceLeavePayload<T>) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.POSTGRES_CHANGES}`,
filter: RealtimePostgresChangesFilter<`${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.ALL}`>,
callback: (payload: RealtimePostgresChangesPayload<T>) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.POSTGRES_CHANGES}`,
filter: RealtimePostgresChangesFilter<`${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.INSERT}`>,
callback: (payload: RealtimePostgresInsertPayload<T>) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.POSTGRES_CHANGES}`,
filter: RealtimePostgresChangesFilter<`${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.UPDATE}`>,
callback: (payload: RealtimePostgresUpdatePayload<T>) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.POSTGRES_CHANGES}`,
filter: RealtimePostgresChangesFilter<`${REALTIME_POSTGRES_CHANGES_LISTEN_EVENT.DELETE}`>,
callback: (payload: RealtimePostgresDeletePayload<T>) => void
): RealtimeChannel
/**
* The following is placed here to display on supabase.com/docs/reference/javascript/subscribe.
* @param type One of "broadcast", "presence", or "postgres_changes".
* @param filter Custom object specific to the Realtime feature detailing which payloads to receive.
* @param callback Function to be invoked when event handler is triggered.
*/
on(
type: `${REALTIME_LISTEN_TYPES.BROADCAST}`,
filter: { event: string },
callback: (payload: {
type: `${REALTIME_LISTEN_TYPES.BROADCAST}`
event: string
[key: string]: any
}) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.BROADCAST}`,
filter: { event: string },
callback: (payload: {
type: `${REALTIME_LISTEN_TYPES.BROADCAST}`
event: string
payload: T
}) => void
): RealtimeChannel
on<T extends { [key: string]: any }>(
type: `${REALTIME_LISTEN_TYPES.SYSTEM}`,
filter: {},
callback: (payload: any) => void
): RealtimeChannel
on(
type: `${REALTIME_LISTEN_TYPES}`,
filter: { event: string; [key: string]: string },
callback: (payload: any) => void
): RealtimeChannel {
return this._on(type, filter, callback)
}
/**
* Sends a message into the channel.
*
* @param args Arguments to send to channel
* @param args.type The type of event to send
* @param args.event The name of the event being sent
* @param args.payload Payload to be sent
* @param opts Options to be used during the send process
*/
async send(
args: {
type: 'broadcast' | 'presence' | 'postgres_changes'
event: string
payload?: any
[key: string]: any
},
opts: { [key: string]: any } = {}
): Promise<RealtimeChannelSendResponse> {
if (!this._canPush() && args.type === 'broadcast') {
const { event, payload: endpoint_payload } = args
const authorization = this.socket.accessTokenValue
? `Bearer ${this.socket.accessTokenValue}`
: ''
const options = {
method: 'POST',
headers: {
Authorization: authorization,
apikey: this.socket.apiKey ? this.socket.apiKey : '',
'Content-Type': 'application/json',
},
body: JSON.stringify({
messages: [
{
topic: this.subTopic,
event,
payload: endpoint_payload,
private: this.private,
},
],
}),
}
try {
const response = await this._fetchWithTimeout(
this.broadcastEndpointURL,
options,
opts.timeout ?? this.timeout
)
await response.body?.cancel()
return response.ok ? 'ok' : 'error'
} catch (error: any) {
if (error.name === 'AbortError') {
return 'timed out'
} else {
return 'error'
}
}
} else {
return new Promise((resolve) => {
const push = this._push(args.type, args, opts.timeout || this.timeout)
if (args.type === 'broadcast' && !this.params?.config?.broadcast?.ack) {
resolve('ok')
}
push.receive('ok', () => resolve('ok'))
push.receive('error', () => resolve('error'))
push.receive('timeout', () => resolve('timed out'))
})
}
}
updateJoinPayload(payload: { [key: string]: any }): void {
this.joinPush.updatePayload(payload)
}
/**
* Leaves the channel.
*
* Unsubscribes from server events, and instructs channel to terminate on server.
* Triggers onClose() hooks.
*
   * To receive leave acknowledgements, use a `receive` hook to bind to the server ack, i.e.:
* channel.unsubscribe().receive("ok", () => alert("left!") )
*/
unsubscribe(timeout = this.timeout): Promise<'ok' | 'timed out' | 'error'> {
this.state = CHANNEL_STATES.leaving
const onClose = () => {
this.socket.log('channel', `leave ${this.topic}`)
this._trigger(CHANNEL_EVENTS.close, 'leave', this._joinRef())
}
this.rejoinTimer.reset()
    // Destroy joinPush to avoid connection timeouts during the unsubscription phase
this.joinPush.destroy()
return new Promise((resolve) => {
const leavePush = new Push(this, CHANNEL_EVENTS.leave, {}, timeout)
leavePush
.receive('ok', () => {
onClose()
resolve('ok')
})
.receive('timeout', () => {
onClose()
resolve('timed out')
})
.receive('error', () => {
resolve('error')
})
leavePush.send()
if (!this._canPush()) {
leavePush.trigger('ok', {})
}
})
}
/** @internal */
async _fetchWithTimeout(
url: string,
options: { [key: string]: any },
timeout: number
) {
const controller = new AbortController()
const id = setTimeout(() => controller.abort(), timeout)
const response = await this.socket.fetch(url, {
...options,
signal: controller.signal,
})
clearTimeout(id)
return response
}
/** @internal */
_push(
event: string,
payload: { [key: string]: any },
timeout = this.timeout
) {
if (!this.joinedOnce) {
throw `tried to push '${event}' to '${this.topic}' before joining. Use channel.subscribe() before pushing events`
}
let pushEvent = new Push(this, event, payload, timeout)
if (this._canPush()) {
pushEvent.send()
} else {
pushEvent.startTimeout()
this.pushBuffer.push(pushEvent)
}
return pushEvent
}
/**
* Overridable message hook
*
* Receives all events for specialized message handling before dispatching to the channel callbacks.
* Must return the payload, modified or unmodified.
*
* @internal
*/
_onMessage(_event: string, payload: any, _ref?: string) {
return payload
}
/** @internal */
_isMember(topic: string): boolean {
return this.topic === topic
}
/** @internal */
_joinRef(): string {
return this.joinPush.ref
}
/** @internal */
_trigger(type: string, payload?: any, ref?: string) {
const typeLower = type.toLocaleLowerCase()
const { close, error, leave, join } = CHANNEL_EVENTS
const events: string[] = [close, error, leave, join]
if (ref && events.indexOf(typeLower) >= 0 && ref !== this._joinRef()) {
return
}
let handledPayload = this._onMessage(typeLower, payload, ref)
if (payload && !handledPayload) {
throw 'channel onMessage callbacks must return the payload, modified or unmodified'
}
if (['insert', 'update', 'delete'].includes(typeLower)) {
this.bindings.postgres_changes
?.filter((bind) => {
return (
bind.filter?.event === '*' ||
bind.filter?.event?.toLocaleLowerCase() === typeLower
)
})
.map((bind) => bind.callback(handledPayload, ref))
} else {
this.bindings[typeLower]
?.filter((bind) => {
if (
['broadcast', 'presence', 'postgres_changes'].includes(typeLower)
) {
if ('id' in bind) {
const bindId = bind.id
const bindEvent = bind.filter?.event
return (
bindId &&
payload.ids?.includes(bindId) &&
(bindEvent === '*' ||
bindEvent?.toLocaleLowerCase() ===
payload.data?.type.toLocaleLowerCase())
)
} else {
const bindEvent = bind?.filter?.event?.toLocaleLowerCase()
return (
bindEvent === '*' ||
bindEvent === payload?.event?.toLocaleLowerCase()
)
}
} else {
return bind.type.toLocaleLowerCase() === typeLower
}
})
.map((bind) => {
if (typeof handledPayload === 'object' && 'ids' in handledPayload) {
const postgresChanges = handledPayload.data
const { schema, table, commit_timestamp, type, errors } =
postgresChanges
const enrichedPayload = {
schema: schema,
table: table,
commit_timestamp: commit_timestamp,
eventType: type,
new: {},
old: {},
errors: errors,
}
handledPayload = {
...enrichedPayload,
...this._getPayloadRecords(postgresChanges),
}
}
bind.callback(handledPayload, ref)
})
}
}
/** @internal */
_isClosed(): boolean {
return this.state === CHANNEL_STATES.closed
}
/** @internal */
_isJoined(): boolean {
return this.state === CHANNEL_STATES.joined
}
/** @internal */
_isJoining(): boolean {
return this.state === CHANNEL_STATES.joining
}
/** @internal */
_isLeaving(): boolean {
return this.state === CHANNEL_STATES.leaving
}
/** @internal */
_replyEventName(ref: string): string {
return `chan_reply_${ref}`
}
/** @internal */
_on(type: string, filter: { [key: string]: any }, callback: Function) {
const typeLower = type.toLocaleLowerCase()
const binding = {
type: typeLower,
filter: filter,
callback: callback,
}
if (this.bindings[typeLower]) {
this.bindings[typeLower].push(binding)
} else {
this.bindings[typeLower] = [binding]
}
return this
}
/** @internal */
_off(type: string, filter: { [key: string]: any }) {
const typeLower = type.toLocaleLowerCase()
this.bindings[typeLower] = this.bindings[typeLower].filter((bind) => {
return !(
bind.type?.toLocaleLowerCase() === typeLower &&
RealtimeChannel.isEqual(bind.filter, filter)
)
})
return this
}
/** @internal */
private static isEqual(
obj1: { [key: string]: string },
obj2: { [key: string]: string }
) {
if (Object.keys(obj1).length !== Object.keys(obj2).length) {
return false
}
for (const k in obj1) {
if (obj1[k] !== obj2[k]) {
return false
}
}
return true
}
/** @internal */
private _rejoinUntilConnected() {
this.rejoinTimer.scheduleTimeout()
if (this.socket.isConnected()) {
this._rejoin()
}
}
/**
* Registers a callback that will be executed when the channel closes.
*
* @internal
*/
private _onClose(callback: Function) {
this._on(CHANNEL_EVENTS.close, {}, callback)
}
/**
* Registers a callback that will be executed when the channel encounteres an error.
*
* @internal
*/
private _onError(callback: Function) {
this._on(CHANNEL_EVENTS.error, {}, (reason: string) => callback(reason))
}
/**
* Returns `true` if the socket is connected and the channel has been joined.
*
* @internal
*/
private _canPush(): boolean {
return this.socket.isConnected() && this._isJoined()
}
/** @internal */
private _rejoin(timeout = this.timeout): void {
if (this._isLeaving()) {
return
}
this.socket._leaveOpenTopic(this.topic)
this.state = CHANNEL_STATES.joining
this.joinPush.resend(timeout)
}
/** @internal */
private _getPayloadRecords(payload: any) {
const records = {
new: {},
old: {},
}
if (payload.type === 'INSERT' || payload.type === 'UPDATE') {
records.new = Transformers.convertChangeData(
payload.columns,
payload.record
)
}
if (payload.type === 'UPDATE' || payload.type === 'DELETE') {
records.old = Transformers.convertChangeData(
payload.columns,
payload.old_record
)
}
return records
}
}

641
node_modules/@supabase/realtime-js/src/RealtimeClient.ts generated vendored Normal file
View File

@ -0,0 +1,641 @@
import type { WebSocket as WSWebSocket } from 'ws'
import {
CHANNEL_EVENTS,
CONNECTION_STATE,
DEFAULT_HEADERS,
DEFAULT_TIMEOUT,
SOCKET_STATES,
TRANSPORTS,
VSN,
WS_CLOSE_NORMAL,
} from './lib/constants'
import Serializer from './lib/serializer'
import Timer from './lib/timer'
import { httpEndpointURL } from './lib/transformers'
import RealtimeChannel from './RealtimeChannel'
import type { RealtimeChannelOptions } from './RealtimeChannel'
type Fetch = typeof fetch
export type Channel = {
name: string
inserted_at: string
updated_at: string
id: number
}
export type RealtimeClientOptions = {
transport?: WebSocketLikeConstructor
timeout?: number
heartbeatIntervalMs?: number
logger?: Function
encode?: Function
decode?: Function
reconnectAfterMs?: Function
headers?: { [key: string]: string }
params?: { [key: string]: any }
log_level?: 'info' | 'debug' | 'warn' | 'error'
fetch?: Fetch
worker?: boolean
workerUrl?: string
accessToken?: () => Promise<string | null>
}
export type RealtimeMessage = {
topic: string
event: string
payload: any
ref: string
join_ref?: string
}
export type RealtimeRemoveChannelResponse = 'ok' | 'timed out' | 'error'
const noop = () => {}
export interface WebSocketLikeConstructor {
new (
address: string | URL,
_ignored?: any,
options?: { headers: Object | undefined }
): WebSocketLike
}
export type WebSocketLike = WebSocket | WSWebSocket | WSWebSocketDummy
export interface WebSocketLikeError {
error: any
message: string
type: string
}
const NATIVE_WEBSOCKET_AVAILABLE = typeof WebSocket !== 'undefined'
const WORKER_SCRIPT = `
addEventListener("message", (e) => {
if (e.data.event === "start") {
setInterval(() => postMessage({ event: "keepAlive" }), e.data.interval);
}
});`
export default class RealtimeClient {
accessTokenValue: string | null = null
apiKey: string | null = null
channels: RealtimeChannel[] = []
endPoint: string = ''
httpEndpoint: string = ''
headers?: { [key: string]: string } = DEFAULT_HEADERS
params?: { [key: string]: string } = {}
timeout: number = DEFAULT_TIMEOUT
transport: WebSocketLikeConstructor | null
heartbeatIntervalMs: number = 30000
heartbeatTimer: ReturnType<typeof setInterval> | undefined = undefined
pendingHeartbeatRef: string | null = null
ref: number = 0
reconnectTimer: Timer
logger: Function = noop
encode: Function
decode: Function
reconnectAfterMs: Function
conn: WebSocketLike | null = null
sendBuffer: Function[] = []
serializer: Serializer = new Serializer()
stateChangeCallbacks: {
open: Function[]
close: Function[]
error: Function[]
message: Function[]
} = {
open: [],
close: [],
error: [],
message: [],
}
fetch: Fetch
accessToken: (() => Promise<string | null>) | null = null
worker?: boolean
workerUrl?: string
workerRef?: Worker
/**
* Initializes the Socket.
*
* @param endPoint The string WebSocket endpoint, ie, "ws://example.com/socket", "wss://example.com", "/socket" (inherited host & protocol)
* @param httpEndpoint The string HTTP endpoint, ie, "https://example.com", "/" (inherited host & protocol)
* @param options.transport The Websocket Transport, for example WebSocket.
* @param options.timeout The default timeout in milliseconds to trigger push timeouts.
* @param options.params The optional params to pass when connecting.
* @param options.headers The optional headers to pass when connecting.
   * @param options.heartbeatIntervalMs The interval in milliseconds at which to send a heartbeat message.
* @param options.logger The optional function for specialized logging, ie: logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) }
* @param options.encode The function to encode outgoing messages. Defaults to JSON: (payload, callback) => callback(JSON.stringify(payload))
* @param options.decode The function to decode incoming messages. Defaults to Serializer's decode.
   * @param options.reconnectAfterMs The optional function that returns the reconnect interval in milliseconds. Defaults to a stepped backoff.
   * @param options.worker Use a Web Worker to drive the heartbeat on a side flow. Defaults to false.
   * @param options.workerUrl The URL of the worker script. Defaults to an inline script that posts a periodic keepAlive event to keep the connection alive.
*/
constructor(endPoint: string, options?: RealtimeClientOptions) {
this.endPoint = `${endPoint}/${TRANSPORTS.websocket}`
this.httpEndpoint = httpEndpointURL(endPoint)
if (options?.transport) {
this.transport = options.transport
} else {
this.transport = null
}
if (options?.params) this.params = options.params
if (options?.headers) this.headers = { ...this.headers, ...options.headers }
if (options?.timeout) this.timeout = options.timeout
if (options?.logger) this.logger = options.logger
if (options?.heartbeatIntervalMs)
this.heartbeatIntervalMs = options.heartbeatIntervalMs
const accessTokenValue = options?.params?.apikey
if (accessTokenValue) {
this.accessTokenValue = accessTokenValue
this.apiKey = accessTokenValue
}
this.reconnectAfterMs = options?.reconnectAfterMs
? options.reconnectAfterMs
: (tries: number) => {
return [1000, 2000, 5000, 10000][tries - 1] || 10000
}
this.encode = options?.encode
? options.encode
: (payload: JSON, callback: Function) => {
return callback(JSON.stringify(payload))
}
this.decode = options?.decode
? options.decode
: this.serializer.decode.bind(this.serializer)
this.reconnectTimer = new Timer(async () => {
this.disconnect()
this.connect()
}, this.reconnectAfterMs)
this.fetch = this._resolveFetch(options?.fetch)
if (options?.worker) {
if (typeof window !== 'undefined' && !window.Worker) {
throw new Error('Web Worker is not supported')
}
this.worker = options?.worker || false
this.workerUrl = options?.workerUrl
}
this.accessToken = options?.accessToken || null
}
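  // Construction sketch (the endpoint and key are placeholders, not real
  // credentials):
  //
  //   const client = new RealtimeClient('wss://<project>.supabase.co/realtime/v1', {
  //     params: { apikey: '<anon-key>' },
  //     heartbeatIntervalMs: 15000,
  //     logger: (kind: string, msg: string, data?: any) =>
  //       console.log(`${kind}: ${msg}`, data),
  //   })
  //   client.connect()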
/**
* Connects the socket, unless already connected.
*/
connect(): void {
if (this.conn) {
return
}
if (this.transport) {
this.conn = new this.transport(this.endpointURL(), undefined, {
headers: this.headers,
})
return
}
if (NATIVE_WEBSOCKET_AVAILABLE) {
this.conn = new WebSocket(this.endpointURL())
this.setupConnection()
return
}
this.conn = new WSWebSocketDummy(this.endpointURL(), undefined, {
close: () => {
this.conn = null
},
})
import('ws').then(({ default: WS }) => {
this.conn = new WS(this.endpointURL(), undefined, {
headers: this.headers,
})
this.setupConnection()
})
}
/**
* Returns the URL of the websocket.
* @returns string The URL of the websocket.
*/
endpointURL(): string {
return this._appendParams(
this.endPoint,
Object.assign({}, this.params, { vsn: VSN })
)
}
/**
* Disconnects the socket.
*
* @param code A numeric status code to send on disconnect.
* @param reason A custom reason for the disconnect.
*/
disconnect(code?: number, reason?: string): void {
if (this.conn) {
this.conn.onclose = function () {} // noop
if (code) {
this.conn.close(code, reason ?? '')
} else {
this.conn.close()
}
this.conn = null
// remove open handles
this.heartbeatTimer && clearInterval(this.heartbeatTimer)
this.reconnectTimer.reset()
}
}
/**
* Returns all created channels
*/
getChannels(): RealtimeChannel[] {
return this.channels
}
/**
* Unsubscribes and removes a single channel
* @param channel A RealtimeChannel instance
*/
async removeChannel(
channel: RealtimeChannel
): Promise<RealtimeRemoveChannelResponse> {
const status = await channel.unsubscribe()
if (this.channels.length === 0) {
this.disconnect()
}
return status
}
/**
* Unsubscribes and removes all channels
*/
async removeAllChannels(): Promise<RealtimeRemoveChannelResponse[]> {
const values_1 = await Promise.all(
this.channels.map((channel) => channel.unsubscribe())
)
this.disconnect()
return values_1
}
/**
* Logs the message.
*
* For customized logging, `this.logger` can be overridden.
*/
log(kind: string, msg: string, data?: any) {
this.logger(kind, msg, data)
}
/**
* Returns the current state of the socket.
*/
connectionState(): CONNECTION_STATE {
switch (this.conn && this.conn.readyState) {
case SOCKET_STATES.connecting:
return CONNECTION_STATE.Connecting
case SOCKET_STATES.open:
return CONNECTION_STATE.Open
case SOCKET_STATES.closing:
return CONNECTION_STATE.Closing
default:
return CONNECTION_STATE.Closed
}
}
/**
   * Returns `true` if the connection is open.
*/
isConnected(): boolean {
return this.connectionState() === CONNECTION_STATE.Open
}
channel(
topic: string,
params: RealtimeChannelOptions = { config: {} }
): RealtimeChannel {
const chan = new RealtimeChannel(`realtime:${topic}`, params, this)
this.channels.push(chan)
return chan
}
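  // Usage sketch (the topic name is illustrative; `config` mirrors
  // RealtimeChannelOptions):
  //
  //   const channel = client.channel('room-1', {
  //     config: { broadcast: { ack: true } },
  //   })
  //   channel.subscribe()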
/**
* Push out a message if the socket is connected.
*
* If the socket is not connected, the message gets enqueued within a local buffer, and sent out when a connection is next established.
*/
push(data: RealtimeMessage): void {
const { topic, event, payload, ref } = data
const callback = () => {
this.encode(data, (result: any) => {
this.conn?.send(result)
})
}
this.log('push', `${topic} ${event} (${ref})`, payload)
if (this.isConnected()) {
callback()
} else {
this.sendBuffer.push(callback)
}
}
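  // Buffering sketch (topic and event are illustrative): if the socket is not
  // yet open, the encoded send is queued in `sendBuffer` and flushed on connect.
  //
  //   client.push({
  //     topic: 'realtime:room-1',
  //     event: 'ping',
  //     payload: {},
  //     ref: client._makeRef(),
  //   })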
/**
* Sets the JWT access token used for channel subscription authorization and Realtime RLS.
*
   * If the param is null, the `accessToken` callback function or the token set on the client is used.
   *
   * When the callback is used, its return value is stored as the client's internal token.
*
* @param token A JWT string to override the token set on the client.
*/
async setAuth(token: string | null = null): Promise<void> {
let tokenToSend =
token ||
(this.accessToken && (await this.accessToken())) ||
this.accessTokenValue
if (tokenToSend) {
let parsed = null
try {
parsed = JSON.parse(atob(tokenToSend.split('.')[1]))
} catch (_error) {}
if (parsed && parsed.exp) {
let now = Math.floor(Date.now() / 1000)
let valid = now - parsed.exp < 0
if (!valid) {
this.log(
'auth',
`InvalidJWTToken: Invalid value for JWT claim "exp" with value ${parsed.exp}`
)
return Promise.reject(
`InvalidJWTToken: Invalid value for JWT claim "exp" with value ${parsed.exp}`
)
}
}
this.accessTokenValue = tokenToSend
this.channels.forEach((channel) => {
tokenToSend && channel.updateJoinPayload({ access_token: tokenToSend })
if (channel.joinedOnce && channel._isJoined()) {
channel._push(CHANNEL_EVENTS.access_token, {
access_token: tokenToSend,
})
}
})
}
}
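  // Usage sketch (the JWT string is a placeholder):
  //
  //   await client.setAuth('<fresh-jwt>') // push an explicit token to all joined channels
  //   await client.setAuth()              // fall back to accessToken() or the stored token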
/**
* Sends a heartbeat message if the socket is connected.
*/
async sendHeartbeat() {
if (!this.isConnected()) {
return
}
if (this.pendingHeartbeatRef) {
this.pendingHeartbeatRef = null
this.log(
'transport',
'heartbeat timeout. Attempting to re-establish connection'
)
      this.conn?.close(WS_CLOSE_NORMAL, 'heartbeat timeout')
return
}
this.pendingHeartbeatRef = this._makeRef()
this.push({
topic: 'phoenix',
event: 'heartbeat',
payload: {},
ref: this.pendingHeartbeatRef,
})
this.setAuth()
}
/**
* Flushes send buffer
*/
flushSendBuffer() {
if (this.isConnected() && this.sendBuffer.length > 0) {
this.sendBuffer.forEach((callback) => callback())
this.sendBuffer = []
}
}
/**
* Use either custom fetch, if provided, or default fetch to make HTTP requests
*
* @internal
*/
_resolveFetch = (customFetch?: Fetch): Fetch => {
let _fetch: Fetch
if (customFetch) {
_fetch = customFetch
} else if (typeof fetch === 'undefined') {
_fetch = (...args) =>
import('@supabase/node-fetch' as any).then(({ default: fetch }) =>
fetch(...args)
)
} else {
_fetch = fetch
}
return (...args) => _fetch(...args)
}
/**
* Return the next message ref, accounting for overflows
*
* @internal
*/
_makeRef(): string {
let newRef = this.ref + 1
if (newRef === this.ref) {
this.ref = 0
} else {
this.ref = newRef
}
return this.ref.toString()
}
/**
* Unsubscribe from channels with the specified topic.
*
* @internal
*/
_leaveOpenTopic(topic: string): void {
let dupChannel = this.channels.find(
(c) => c.topic === topic && (c._isJoined() || c._isJoining())
)
if (dupChannel) {
this.log('transport', `leaving duplicate topic "${topic}"`)
dupChannel.unsubscribe()
}
}
/**
* Removes a subscription from the socket.
*
* @param channel An open subscription.
*
* @internal
*/
_remove(channel: RealtimeChannel) {
this.channels = this.channels.filter(
(c: RealtimeChannel) => c._joinRef() !== channel._joinRef()
)
}
/**
* Sets up connection handlers.
*
* @internal
*/
private setupConnection(): void {
if (this.conn) {
this.conn.binaryType = 'arraybuffer'
this.conn.onopen = () => this._onConnOpen()
this.conn.onerror = (error: WebSocketLikeError) =>
this._onConnError(error as WebSocketLikeError)
this.conn.onmessage = (event: any) => this._onConnMessage(event)
this.conn.onclose = (event: any) => this._onConnClose(event)
}
}
/** @internal */
private _onConnMessage(rawMessage: { data: any }) {
this.decode(rawMessage.data, (msg: RealtimeMessage) => {
let { topic, event, payload, ref } = msg
if (ref && ref === this.pendingHeartbeatRef) {
this.pendingHeartbeatRef = null
}
this.log(
'receive',
`${payload.status || ''} ${topic} ${event} ${
(ref && '(' + ref + ')') || ''
}`,
payload
)
this.channels
.filter((channel: RealtimeChannel) => channel._isMember(topic))
.forEach((channel: RealtimeChannel) =>
channel._trigger(event, payload, ref)
)
this.stateChangeCallbacks.message.forEach((callback) => callback(msg))
})
}
/** @internal */
private async _onConnOpen() {
this.log('transport', `connected to ${this.endpointURL()}`)
this.flushSendBuffer()
this.reconnectTimer.reset()
if (!this.worker) {
this.heartbeatTimer && clearInterval(this.heartbeatTimer)
this.heartbeatTimer = setInterval(
() => this.sendHeartbeat(),
this.heartbeatIntervalMs
)
} else {
if (this.workerUrl) {
        this.log('worker', `starting worker from ${this.workerUrl}`)
} else {
this.log('worker', `starting default worker`)
}
const objectUrl = this._workerObjectUrl(this.workerUrl!)
this.workerRef = new Worker(objectUrl)
this.workerRef.onerror = (error) => {
this.log('worker', 'worker error', error.message)
this.workerRef!.terminate()
}
this.workerRef.onmessage = (event) => {
if (event.data.event === 'keepAlive') {
this.sendHeartbeat()
}
}
this.workerRef.postMessage({
event: 'start',
interval: this.heartbeatIntervalMs,
})
}
    this.stateChangeCallbacks.open.forEach((callback) => callback())
}
/** @internal */
private _onConnClose(event: any) {
this.log('transport', 'close', event)
this._triggerChanError()
this.heartbeatTimer && clearInterval(this.heartbeatTimer)
this.reconnectTimer.scheduleTimeout()
this.stateChangeCallbacks.close.forEach((callback) => callback(event))
}
/** @internal */
private _onConnError(error: WebSocketLikeError) {
this.log('transport', error.message)
this._triggerChanError()
this.stateChangeCallbacks.error.forEach((callback) => callback(error))
}
/** @internal */
private _triggerChanError() {
this.channels.forEach((channel: RealtimeChannel) =>
channel._trigger(CHANNEL_EVENTS.error)
)
}
/** @internal */
private _appendParams(
url: string,
params: { [key: string]: string }
): string {
if (Object.keys(params).length === 0) {
return url
}
const prefix = url.match(/\?/) ? '&' : '?'
const query = new URLSearchParams(params)
return `${url}${prefix}${query}`
}
private _workerObjectUrl(url: string | undefined): string {
let result_url: string
if (url) {
result_url = url
} else {
const blob = new Blob([WORKER_SCRIPT], { type: 'application/javascript' })
result_url = URL.createObjectURL(blob)
}
return result_url
}
}
class WSWebSocketDummy {
binaryType: string = 'arraybuffer'
close: Function
onclose: Function = () => {}
onerror: Function = () => {}
onmessage: Function = () => {}
onopen: Function = () => {}
readyState: number = SOCKET_STATES.connecting
send: Function = () => {}
url: string | URL | null = null
constructor(
address: string,
_protocols: undefined,
options: { close: Function }
) {
this.url = address
this.close = options.close
}
}

364
node_modules/@supabase/realtime-js/src/RealtimePresence.ts generated vendored Normal file
View File

@ -0,0 +1,364 @@
/*
This file draws heavily from https://github.com/phoenixframework/phoenix/blob/d344ec0a732ab4ee204215b31de69cf4be72e3bf/assets/js/phoenix/presence.js
License: https://github.com/phoenixframework/phoenix/blob/d344ec0a732ab4ee204215b31de69cf4be72e3bf/LICENSE.md
*/
import type {
PresenceOpts,
PresenceOnJoinCallback,
PresenceOnLeaveCallback,
} from 'phoenix'
import type RealtimeChannel from './RealtimeChannel'
type Presence<T extends { [key: string]: any } = {}> = {
presence_ref: string
} & T
export type RealtimePresenceState<T extends { [key: string]: any } = {}> = {
[key: string]: Presence<T>[]
}
export type RealtimePresenceJoinPayload<T extends { [key: string]: any }> = {
event: `${REALTIME_PRESENCE_LISTEN_EVENTS.JOIN}`
key: string
currentPresences: Presence<T>[]
newPresences: Presence<T>[]
}
export type RealtimePresenceLeavePayload<T extends { [key: string]: any }> = {
event: `${REALTIME_PRESENCE_LISTEN_EVENTS.LEAVE}`
key: string
currentPresences: Presence<T>[]
leftPresences: Presence<T>[]
}
export enum REALTIME_PRESENCE_LISTEN_EVENTS {
SYNC = 'sync',
JOIN = 'join',
LEAVE = 'leave',
}
type PresenceDiff = {
joins: RealtimePresenceState
leaves: RealtimePresenceState
}
type RawPresenceState = {
[key: string]: {
metas: {
phx_ref?: string
phx_ref_prev?: string
[key: string]: any
}[]
}
}
type RawPresenceDiff = {
joins: RawPresenceState
leaves: RawPresenceState
}
type PresenceChooser<T> = (key: string, presences: Presence[]) => T
export default class RealtimePresence {
state: RealtimePresenceState = {}
pendingDiffs: RawPresenceDiff[] = []
joinRef: string | null = null
caller: {
onJoin: PresenceOnJoinCallback
onLeave: PresenceOnLeaveCallback
onSync: () => void
} = {
onJoin: () => {},
onLeave: () => {},
onSync: () => {},
}
/**
* Initializes the Presence.
*
* @param channel - The RealtimeChannel
* @param opts - The options,
* for example `{events: {state: 'state', diff: 'diff'}}`
*/
constructor(public channel: RealtimeChannel, opts?: PresenceOpts) {
const events = opts?.events || {
state: 'presence_state',
diff: 'presence_diff',
}
this.channel._on(events.state, {}, (newState: RawPresenceState) => {
const { onJoin, onLeave, onSync } = this.caller
this.joinRef = this.channel._joinRef()
this.state = RealtimePresence.syncState(
this.state,
newState,
onJoin,
onLeave
)
this.pendingDiffs.forEach((diff) => {
this.state = RealtimePresence.syncDiff(
this.state,
diff,
onJoin,
onLeave
)
})
this.pendingDiffs = []
onSync()
})
this.channel._on(events.diff, {}, (diff: RawPresenceDiff) => {
const { onJoin, onLeave, onSync } = this.caller
if (this.inPendingSyncState()) {
this.pendingDiffs.push(diff)
} else {
this.state = RealtimePresence.syncDiff(
this.state,
diff,
onJoin,
onLeave
)
onSync()
}
})
this.onJoin((key, currentPresences, newPresences) => {
this.channel._trigger('presence', {
event: 'join',
key,
currentPresences,
newPresences,
})
})
this.onLeave((key, currentPresences, leftPresences) => {
this.channel._trigger('presence', {
event: 'leave',
key,
currentPresences,
leftPresences,
})
})
this.onSync(() => {
this.channel._trigger('presence', { event: 'sync' })
})
}
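  // Usage sketch: presence is normally consumed through a RealtimeChannel
  // (channel name and tracked payload are illustrative; `track` and
  // `presenceState` live on RealtimeChannel):
  //
  //   const channel = client.channel('room-1')
  //   channel
  //     .on('presence', { event: 'sync' }, () => console.log(channel.presenceState()))
  //     .subscribe(async (status) => {
  //       if (status === 'SUBSCRIBED') await channel.track({ user_id: 1 })
  //     })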
/**
* Used to sync the list of presences on the server with the
* client's state.
*
* An optional `onJoin` and `onLeave` callback can be provided to
* react to changes in the client's local presences across
* disconnects and reconnects with the server.
*
* @internal
*/
private static syncState(
currentState: RealtimePresenceState,
newState: RawPresenceState | RealtimePresenceState,
onJoin: PresenceOnJoinCallback,
onLeave: PresenceOnLeaveCallback
): RealtimePresenceState {
const state = this.cloneDeep(currentState)
const transformedState = this.transformState(newState)
const joins: RealtimePresenceState = {}
const leaves: RealtimePresenceState = {}
this.map(state, (key: string, presences: Presence[]) => {
if (!transformedState[key]) {
leaves[key] = presences
}
})
this.map(transformedState, (key, newPresences: Presence[]) => {
const currentPresences: Presence[] = state[key]
if (currentPresences) {
const newPresenceRefs = newPresences.map(
(m: Presence) => m.presence_ref
)
const curPresenceRefs = currentPresences.map(
(m: Presence) => m.presence_ref
)
const joinedPresences: Presence[] = newPresences.filter(
(m: Presence) => curPresenceRefs.indexOf(m.presence_ref) < 0
)
const leftPresences: Presence[] = currentPresences.filter(
(m: Presence) => newPresenceRefs.indexOf(m.presence_ref) < 0
)
if (joinedPresences.length > 0) {
joins[key] = joinedPresences
}
if (leftPresences.length > 0) {
leaves[key] = leftPresences
}
} else {
joins[key] = newPresences
}
})
return this.syncDiff(state, { joins, leaves }, onJoin, onLeave)
}
/**
* Used to sync a diff of presence join and leave events from the
* server, as they happen.
*
* Like `syncState`, `syncDiff` accepts optional `onJoin` and
* `onLeave` callbacks to react to a user joining or leaving from a
* device.
*
* @internal
*/
private static syncDiff(
state: RealtimePresenceState,
diff: RawPresenceDiff | PresenceDiff,
onJoin: PresenceOnJoinCallback,
onLeave: PresenceOnLeaveCallback
): RealtimePresenceState {
const { joins, leaves } = {
joins: this.transformState(diff.joins),
leaves: this.transformState(diff.leaves),
}
if (!onJoin) {
onJoin = () => {}
}
if (!onLeave) {
onLeave = () => {}
}
this.map(joins, (key, newPresences: Presence[]) => {
const currentPresences: Presence[] = state[key] ?? []
state[key] = this.cloneDeep(newPresences)
if (currentPresences.length > 0) {
const joinedPresenceRefs = state[key].map(
(m: Presence) => m.presence_ref
)
const curPresences: Presence[] = currentPresences.filter(
(m: Presence) => joinedPresenceRefs.indexOf(m.presence_ref) < 0
)
state[key].unshift(...curPresences)
}
onJoin(key, currentPresences, newPresences)
})
this.map(leaves, (key, leftPresences: Presence[]) => {
let currentPresences: Presence[] = state[key]
if (!currentPresences) return
const presenceRefsToRemove = leftPresences.map(
(m: Presence) => m.presence_ref
)
currentPresences = currentPresences.filter(
(m: Presence) => presenceRefsToRemove.indexOf(m.presence_ref) < 0
)
state[key] = currentPresences
onLeave(key, currentPresences, leftPresences)
if (currentPresences.length === 0) delete state[key]
})
return state
}
/** @internal */
private static map<T = any>(
obj: RealtimePresenceState,
func: PresenceChooser<T>
): T[] {
return Object.getOwnPropertyNames(obj).map((key) => func(key, obj[key]))
}
/**
* Remove 'metas' key
* Change 'phx_ref' to 'presence_ref'
* Remove 'phx_ref' and 'phx_ref_prev'
*
* @example
* // returns {
* abc123: [
* { presence_ref: '2', user_id: 1 },
* { presence_ref: '3', user_id: 2 }
* ]
* }
* RealtimePresence.transformState({
* abc123: {
* metas: [
   * { phx_ref: '2', phx_ref_prev: '1', user_id: 1 },
* { phx_ref: '3', user_id: 2 }
* ]
* }
* })
*
* @internal
*/
private static transformState(
state: RawPresenceState | RealtimePresenceState
): RealtimePresenceState {
state = this.cloneDeep(state)
return Object.getOwnPropertyNames(state).reduce((newState, key) => {
const presences = state[key]
if ('metas' in presences) {
newState[key] = presences.metas.map((presence) => {
presence['presence_ref'] = presence['phx_ref']
delete presence['phx_ref']
delete presence['phx_ref_prev']
return presence
}) as Presence[]
} else {
newState[key] = presences
}
return newState
}, {} as RealtimePresenceState)
}
/** @internal */
private static cloneDeep(obj: { [key: string]: any }) {
return JSON.parse(JSON.stringify(obj))
}
/** @internal */
private onJoin(callback: PresenceOnJoinCallback): void {
this.caller.onJoin = callback
}
/** @internal */
private onLeave(callback: PresenceOnLeaveCallback): void {
this.caller.onLeave = callback
}
/** @internal */
private onSync(callback: () => void): void {
this.caller.onSync = callback
}
/** @internal */
private inPendingSyncState(): boolean {
return !this.joinRef || this.joinRef !== this.channel._joinRef()
}
}

48
node_modules/@supabase/realtime-js/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,48 @@
import RealtimeClient, {
RealtimeClientOptions,
RealtimeMessage,
RealtimeRemoveChannelResponse,
} from './RealtimeClient'
import RealtimeChannel, {
RealtimeChannelOptions,
RealtimeChannelSendResponse,
RealtimePostgresChangesFilter,
RealtimePostgresChangesPayload,
RealtimePostgresInsertPayload,
RealtimePostgresUpdatePayload,
RealtimePostgresDeletePayload,
REALTIME_LISTEN_TYPES,
REALTIME_POSTGRES_CHANGES_LISTEN_EVENT,
REALTIME_SUBSCRIBE_STATES,
REALTIME_CHANNEL_STATES,
} from './RealtimeChannel'
import RealtimePresence, {
RealtimePresenceState,
RealtimePresenceJoinPayload,
RealtimePresenceLeavePayload,
REALTIME_PRESENCE_LISTEN_EVENTS,
} from './RealtimePresence'
export {
RealtimePresence,
RealtimeChannel,
RealtimeChannelOptions,
RealtimeChannelSendResponse,
RealtimeClient,
RealtimeClientOptions,
RealtimeMessage,
RealtimePostgresChangesFilter,
RealtimePostgresChangesPayload,
RealtimePostgresInsertPayload,
RealtimePostgresUpdatePayload,
RealtimePostgresDeletePayload,
RealtimePresenceJoinPayload,
RealtimePresenceLeavePayload,
RealtimePresenceState,
RealtimeRemoveChannelResponse,
REALTIME_LISTEN_TYPES,
REALTIME_POSTGRES_CHANGES_LISTEN_EVENT,
REALTIME_PRESENCE_LISTEN_EVENTS,
REALTIME_SUBSCRIBE_STATES,
REALTIME_CHANNEL_STATES,
}
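// Import sketch for consumers of this entry point:
//
//   import { RealtimeClient, REALTIME_LISTEN_TYPES } from '@supabase/realtime-js'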

44
node_modules/@supabase/realtime-js/src/lib/constants.ts generated vendored Normal file
View File

@ -0,0 +1,44 @@
import { version } from './version'
export const DEFAULT_HEADERS = { 'X-Client-Info': `realtime-js/${version}` }
export const VSN: string = '1.0.0'
export const DEFAULT_TIMEOUT = 10000
export const WS_CLOSE_NORMAL = 1000
export enum SOCKET_STATES {
connecting = 0,
open = 1,
closing = 2,
closed = 3,
}
export enum CHANNEL_STATES {
closed = 'closed',
errored = 'errored',
joined = 'joined',
joining = 'joining',
leaving = 'leaving',
}
export enum CHANNEL_EVENTS {
close = 'phx_close',
error = 'phx_error',
join = 'phx_join',
reply = 'phx_reply',
leave = 'phx_leave',
access_token = 'access_token',
}
export enum TRANSPORTS {
websocket = 'websocket',
}
export enum CONNECTION_STATE {
Connecting = 'connecting',
Open = 'open',
Closing = 'closing',
Closed = 'closed',
}

130
node_modules/@supabase/realtime-js/src/lib/push.ts generated vendored Normal file
View File

@ -0,0 +1,130 @@
import { DEFAULT_TIMEOUT } from '../lib/constants'
import type RealtimeChannel from '../RealtimeChannel'
export default class Push {
sent: boolean = false
timeoutTimer: number | undefined = undefined
ref: string = ''
receivedResp: {
status: string
response: { [key: string]: any }
} | null = null
recHooks: {
status: string
callback: Function
}[] = []
refEvent: string | null = null
/**
* Initializes the Push
*
* @param channel The Channel
* @param event The event, for example `"phx_join"`
* @param payload The payload, for example `{user_id: 123}`
* @param timeout The push timeout in milliseconds
*/
constructor(
public channel: RealtimeChannel,
public event: string,
public payload: { [key: string]: any } = {},
public timeout: number = DEFAULT_TIMEOUT
) {}
resend(timeout: number) {
this.timeout = timeout
this._cancelRefEvent()
this.ref = ''
this.refEvent = null
this.receivedResp = null
this.sent = false
this.send()
}
send() {
if (this._hasReceived('timeout')) {
return
}
this.startTimeout()
this.sent = true
this.channel.socket.push({
topic: this.channel.topic,
event: this.event,
payload: this.payload,
ref: this.ref,
join_ref: this.channel._joinRef(),
})
}
updatePayload(payload: { [key: string]: any }): void {
this.payload = { ...this.payload, ...payload }
}
receive(status: string, callback: Function) {
if (this._hasReceived(status)) {
callback(this.receivedResp?.response)
}
this.recHooks.push({ status, callback })
return this
}
startTimeout() {
if (this.timeoutTimer) {
return
}
this.ref = this.channel.socket._makeRef()
this.refEvent = this.channel._replyEventName(this.ref)
const callback = (payload: any) => {
this._cancelRefEvent()
this._cancelTimeout()
this.receivedResp = payload
this._matchReceive(payload)
}
this.channel._on(this.refEvent, {}, callback)
this.timeoutTimer = <any>setTimeout(() => {
this.trigger('timeout', {})
}, this.timeout)
}
trigger(status: string, response: any) {
if (this.refEvent)
this.channel._trigger(this.refEvent, { status, response })
}
destroy() {
this._cancelRefEvent()
this._cancelTimeout()
}
private _cancelRefEvent() {
if (!this.refEvent) {
return
}
this.channel._off(this.refEvent, {})
}
private _cancelTimeout() {
clearTimeout(this.timeoutTimer)
this.timeoutTimer = undefined
}
private _matchReceive({
status,
response,
}: {
status: string
response: Function
}) {
this.recHooks
.filter((h) => h.status === status)
.forEach((h) => h.callback(response))
}
private _hasReceived(status: string) {
return this.receivedResp && this.receivedResp.status === status
}
}

49
node_modules/@supabase/realtime-js/src/lib/serializer.ts generated vendored Normal file
View File

@ -0,0 +1,49 @@
// This file draws heavily from https://github.com/phoenixframework/phoenix/commit/cf098e9cf7a44ee6479d31d911a97d3c7430c6fe
// License: https://github.com/phoenixframework/phoenix/blob/master/LICENSE.md
export default class Serializer {
HEADER_LENGTH = 1
decode(rawPayload: ArrayBuffer | string, callback: Function) {
if (rawPayload.constructor === ArrayBuffer) {
return callback(this._binaryDecode(rawPayload))
}
if (typeof rawPayload === 'string') {
return callback(JSON.parse(rawPayload))
}
return callback({})
}
private _binaryDecode(buffer: ArrayBuffer) {
const view = new DataView(buffer)
const decoder = new TextDecoder()
return this._decodeBroadcast(buffer, view, decoder)
}
private _decodeBroadcast(
buffer: ArrayBuffer,
view: DataView,
decoder: TextDecoder
): {
ref: null
topic: string
event: string
payload: { [key: string]: any }
} {
const topicSize = view.getUint8(1)
const eventSize = view.getUint8(2)
let offset = this.HEADER_LENGTH + 2
const topic = decoder.decode(buffer.slice(offset, offset + topicSize))
offset = offset + topicSize
const event = decoder.decode(buffer.slice(offset, offset + eventSize))
offset = offset + eventSize
const data = JSON.parse(
decoder.decode(buffer.slice(offset, buffer.byteLength))
)
return { ref: null, topic: topic, event: event, payload: data }
}
}
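// Decode sketch (the input is illustrative): JSON frames arrive as strings,
// binary broadcast frames as ArrayBuffers routed through _binaryDecode.
//
//   new Serializer().decode('{"event":"ok","payload":{}}', (msg: any) => {
//     console.log(msg.event) // 'ok'
//   })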

36
node_modules/@supabase/realtime-js/src/lib/timer.ts generated vendored Normal file
View File

@ -0,0 +1,36 @@
/**
* Creates a timer that accepts a `timerCalc` function to perform calculated timeout retries, such as exponential backoff.
*
* @example
* let reconnectTimer = new Timer(() => this.connect(), function(tries){
* return [1000, 5000, 10000][tries - 1] || 10000
* })
* reconnectTimer.scheduleTimeout() // fires after 1000
* reconnectTimer.scheduleTimeout() // fires after 5000
* reconnectTimer.reset()
* reconnectTimer.scheduleTimeout() // fires after 1000
*/
export default class Timer {
timer: number | undefined = undefined
tries: number = 0
constructor(public callback: Function, public timerCalc: Function) {
this.callback = callback
this.timerCalc = timerCalc
}
reset() {
this.tries = 0
clearTimeout(this.timer)
}
// Cancels any previous scheduleTimeout and schedules callback
scheduleTimeout() {
clearTimeout(this.timer)
this.timer = <any>setTimeout(() => {
this.tries = this.tries + 1
this.callback()
}, this.timerCalc(this.tries + 1))
}
}

254
node_modules/@supabase/realtime-js/src/lib/transformers.ts generated vendored Normal file
View File

@ -0,0 +1,254 @@
/**
* Helpers to convert the change Payload into native JS types.
*/
// Adapted from epgsql (src/epgsql_binary.erl), this module licensed under
// 3-clause BSD found here: https://raw.githubusercontent.com/epgsql/epgsql/devel/LICENSE
export enum PostgresTypes {
abstime = 'abstime',
bool = 'bool',
date = 'date',
daterange = 'daterange',
float4 = 'float4',
float8 = 'float8',
int2 = 'int2',
int4 = 'int4',
int4range = 'int4range',
int8 = 'int8',
int8range = 'int8range',
json = 'json',
jsonb = 'jsonb',
money = 'money',
numeric = 'numeric',
oid = 'oid',
reltime = 'reltime',
text = 'text',
time = 'time',
timestamp = 'timestamp',
timestamptz = 'timestamptz',
timetz = 'timetz',
tsrange = 'tsrange',
tstzrange = 'tstzrange',
}
type Columns = {
name: string // the column name. eg: "user_id"
type: string // the column type. eg: "uuid"
flags?: string[] // any special flags for the column. eg: ["key"]
type_modifier?: number // the type modifier. eg: 4294967295
}[]
type BaseValue = null | string | number | boolean
type RecordValue = BaseValue | BaseValue[]
type Record = {
[key: string]: RecordValue
}
/**
* Takes an array of columns and an object of string values then converts each string value
* to its mapped type.
*
* @param {{name: String, type: String}[]} columns
* @param {Object} record
* @param {Object} options The map of various options that can be applied to the mapper
* @param {Array} options.skipTypes The array of types that should not be converted
*
* @example convertChangeData([{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age:'33'}, {})
* //=>{ first_name: 'Paul', age: 33 }
*/
export const convertChangeData = (
columns: Columns,
record: Record,
options: { skipTypes?: string[] } = {}
): Record => {
const skipTypes = options.skipTypes ?? []
return Object.keys(record).reduce((acc, rec_key) => {
acc[rec_key] = convertColumn(rec_key, columns, record, skipTypes)
return acc
}, {} as Record)
}
/**
* Converts the value of an individual column.
*
* @param {String} columnName The column that you want to convert
* @param {{name: String, type: String}[]} columns All of the columns
* @param {Object} record The map of string values
* @param {Array} skipTypes An array of types that should not be converted
 * @return {RecordValue} The converted value of the column
*
* @example convertColumn('age', [{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age: '33'}, [])
* //=> 33
* @example convertColumn('age', [{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age: '33'}, ['int4'])
* //=> "33"
*/
export const convertColumn = (
columnName: string,
columns: Columns,
record: Record,
skipTypes: string[]
): RecordValue => {
const column = columns.find((x) => x.name === columnName)
const colType = column?.type
const value = record[columnName]
if (colType && !skipTypes.includes(colType)) {
return convertCell(colType, value)
}
return noop(value)
}
/**
* If the value of the cell is `null`, returns null.
* Otherwise converts the string value to the correct type.
* @param {String} type A postgres column type
* @param {String} value The cell value
*
* @example convertCell('bool', 't')
* //=> true
* @example convertCell('int8', '10')
* //=> 10
* @example convertCell('_int4', '{1,2,3,4}')
* //=> [1,2,3,4]
*/
export const convertCell = (type: string, value: RecordValue): RecordValue => {
// if data type is an array
if (type.charAt(0) === '_') {
const dataType = type.slice(1, type.length)
return toArray(value, dataType)
}
// If not null, convert to correct type.
switch (type) {
case PostgresTypes.bool:
return toBoolean(value)
case PostgresTypes.float4:
case PostgresTypes.float8:
case PostgresTypes.int2:
case PostgresTypes.int4:
case PostgresTypes.int8:
case PostgresTypes.numeric:
case PostgresTypes.oid:
return toNumber(value)
case PostgresTypes.json:
case PostgresTypes.jsonb:
return toJson(value)
case PostgresTypes.timestamp:
return toTimestampString(value) // Format to be consistent with PostgREST
case PostgresTypes.abstime: // To allow users to cast it based on Timezone
case PostgresTypes.date: // To allow users to cast it based on Timezone
case PostgresTypes.daterange:
case PostgresTypes.int4range:
case PostgresTypes.int8range:
case PostgresTypes.money:
case PostgresTypes.reltime: // To allow users to cast it based on Timezone
case PostgresTypes.text:
case PostgresTypes.time: // To allow users to cast it based on Timezone
case PostgresTypes.timestamptz: // To allow users to cast it based on Timezone
case PostgresTypes.timetz: // To allow users to cast it based on Timezone
case PostgresTypes.tsrange:
case PostgresTypes.tstzrange:
return noop(value)
default:
// Return the value for remaining types
return noop(value)
}
}
const noop = (value: RecordValue): RecordValue => {
return value
}
export const toBoolean = (value: RecordValue): RecordValue => {
switch (value) {
case 't':
return true
case 'f':
return false
default:
return value
}
}
export const toNumber = (value: RecordValue): RecordValue => {
if (typeof value === 'string') {
const parsedValue = parseFloat(value)
if (!Number.isNaN(parsedValue)) {
return parsedValue
}
}
return value
}
export const toJson = (value: RecordValue): RecordValue => {
if (typeof value === 'string') {
try {
return JSON.parse(value)
} catch (error) {
console.log(`JSON parse error: ${error}`)
return value
}
}
return value
}
/**
* Converts a Postgres Array into a native JS array
*
* @example toArray('{}', 'int4')
* //=> []
* @example toArray('{"[2021-01-01,2021-12-31)","(2021-01-01,2021-12-32]"}', 'daterange')
* //=> ['[2021-01-01,2021-12-31)', '(2021-01-01,2021-12-32]']
* @example toArray([1,2,3,4], 'int4')
* //=> [1,2,3,4]
*/
export const toArray = (value: RecordValue, type: string): RecordValue => {
if (typeof value !== 'string') {
return value
}
const lastIdx = value.length - 1
const closeBrace = value[lastIdx]
const openBrace = value[0]
// Confirm value is a Postgres array by checking curly brackets
if (openBrace === '{' && closeBrace === '}') {
let arr
const valTrim = value.slice(1, lastIdx)
// TODO: find a better solution to separate Postgres array data
try {
arr = JSON.parse('[' + valTrim + ']')
} catch (_) {
// WARNING: splitting on comma does not cover all edge cases
arr = valTrim ? valTrim.split(',') : []
}
return arr.map((val: BaseValue) => convertCell(type, val))
}
return value
}
/**
* Fixes timestamp to be ISO-8601. Swaps the space between the date and time for a 'T'
* See https://github.com/supabase/supabase/issues/18
*
* @example toTimestampString('2019-09-10 00:00:00')
* //=> '2019-09-10T00:00:00'
*/
export const toTimestampString = (value: RecordValue): RecordValue => {
if (typeof value === 'string') {
return value.replace(' ', 'T')
}
return value
}
export const httpEndpointURL = (socketUrl: string): string => {
let url = socketUrl
url = url.replace(/^ws/i, 'http')
url = url.replace(/(\/socket\/websocket|\/socket|\/websocket)\/?$/i, '')
return url.replace(/\/+$/, '')
}
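// Example (the hostname is illustrative):
//
//   httpEndpointURL('wss://abc.supabase.co/realtime/v1/websocket')
//   //=> 'https://abc.supabase.co/realtime/v1'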

1
node_modules/@supabase/realtime-js/src/lib/version.ts generated vendored Normal file
View File

@ -0,0 +1 @@
export const version = '2.11.2'

201
node_modules/@supabase/storage-js/LICENSE generated vendored Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

157
node_modules/@supabase/storage-js/README.md generated vendored Normal file
View File

@ -0,0 +1,157 @@
# `storage-js`
JS Client library to interact with Supabase Storage.
- Documentation: https://supabase.io/docs/reference/javascript/storage-createbucket
## Quick Start Guide
### Installing the module
```bash
npm install @supabase/storage-js
```
### Connecting to the storage backend
```js
import { StorageClient } from '@supabase/storage-js'
const STORAGE_URL = 'https://<project_ref>.supabase.co/storage/v1'
const SERVICE_KEY = '<service_role>' //! service key, not anon key
const storageClient = new StorageClient(STORAGE_URL, {
apikey: SERVICE_KEY,
Authorization: `Bearer ${SERVICE_KEY}`,
})
```
### Handling resources
#### Handling Storage Buckets
- Create a new Storage bucket:
```js
const { data, error } = await storageClient.createBucket(
'test_bucket', // Bucket name (must be unique)
{ public: false } // Bucket options
)
```
- Retrieve the details of an existing Storage bucket:
```js
const { data, error } = await storageClient.getBucket('test_bucket')
```
- Update an existing Storage bucket:
```js
const { data, error } = await storageClient.updateBucket(
'test_bucket', // Bucket name
{ public: false } // Bucket options
)
```
- Remove all objects inside a single bucket:
```js
const { data, error } = await storageClient.emptyBucket('test_bucket')
```
- Delete an existing bucket (a bucket can't be deleted with existing objects inside it):
```js
const { data, error } = await storageClient.deleteBucket('test_bucket')
```
- Retrieve the details of all Storage buckets within an existing project:
```js
const { data, error } = await storageClient.listBuckets()
```
#### Handling Files
- Upload a file to an existing bucket:
```js
const fileBody = ... // load your file here
const { data, error } = await storageClient.from('bucket').upload('path/to/file', fileBody)
```
> Note:
> The path in `data.Key` is prefixed by the bucket ID and is not the value which should be passed to the `download` method in order to fetch the file.
> To fetch the file via the `download` method, use `data.path` and `data.bucketId` as follows:
>
> ```javascript
> const { data, error } = await storageClient.from('bucket').upload('/folder/file.txt', fileBody)
> // check for errors
> const { data: data2, error: error2 } = await storageClient.from(data.bucketId).download(data.path)
> ```
> Note: The `upload` method also accepts a map of optional parameters. For a complete list see the [Supabase API reference](https://supabase.com/docs/reference/javascript/storage-from-upload).
- Download a file from an existing bucket:
```js
const { data, error } = await storageClient.from('bucket').download('path/to/file')
```
- List all the files within a bucket:
```js
const { data, error } = await storageClient.from('bucket').list('folder')
```
> Note: The `list` method also accepts a map of optional parameters. For a complete list see the [Supabase API reference](https://supabase.com/docs/reference/javascript/storage-from-list).
- Replace an existing file at the specified path with a new one:
```js
const fileBody = new Blob(['new file contents']) // load your file here (any supported body: Blob, File, ArrayBuffer, FormData, string, ...)
const { data, error } = await storageClient
.from('bucket')
.update('path/to/file', fileBody)
```
> Note: The `update` method also accepts a map of optional parameters. For a complete list see the [Supabase API reference](https://supabase.com/docs/reference/javascript/storage-from-update).
- Move an existing file:
```js
const { data, error } = await storageClient
.from('bucket')
.move('old/path/to/file', 'new/path/to/file')
```
- Delete files within the same bucket:
```js
const { data, error } = await storageClient.from('bucket').remove(['path/to/file'])
```
- Create a signed URL to download a file without requiring permissions:
```js
const expireIn = 60
const { data, error } = await storageClient
.from('bucket')
.createSignedUrl('path/to/file', expireIn)
```
- Retrieve URLs for assets in public buckets:
```js
const { data, error } = await storageClient.from('public-bucket').getPublicUrl('path/to/file')
```
## Sponsors
We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone.
[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase)

70
node_modules/@supabase/storage-js/package.json generated vendored Normal file
View File

@ -0,0 +1,70 @@
{
"name": "@supabase/storage-js",
"version": "2.7.1",
"description": "Isomorphic storage client for Supabase.",
"keywords": [
"javascript",
"typescript",
"supabase"
],
"homepage": "https://github.com/supabase/storage-js",
"bugs": "https://github.com/supabase/storage-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/main/index.js",
"module": "dist/module/index.js",
"types": "dist/module/index.d.ts",
"sideEffects": false,
"repository": "supabase/storage-js",
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\"",
"build": "genversion src/lib/version.ts --es6 && run-s clean format build:*",
"build:main": "tsc -p tsconfig.json",
"build:module": "tsc -p tsconfig.module.json",
"build:umd": "webpack",
"types-generate": "dts-gen -m '@supabase/storage-js' -s",
"test": "run-s test:clean test:infra test:suite test:clean",
"test:suite": "jest --runInBand",
"test:infra": "cd infra && docker compose down && docker compose up -d --build && sleep 10",
"test:clean": "cd infra && docker compose down --remove-orphans",
"docs": "typedoc --entryPoints src/index.ts --out docs/v2 --entryPoints src/packages/* --excludePrivate --excludeProtected",
"docs:json": "typedoc --json docs/v2/spec.json --entryPoints src/index.ts --entryPoints src/packages/* --excludePrivate --excludeExternals --excludeProtected"
},
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
},
"devDependencies": {
"@types/jest": "^26.0.13",
"form-data": "^4.0.0",
"genversion": "^3.0.1",
"husky": "^4.3.0",
"jest": "^29.0.0",
"jest-environment-jsdom": "^29.5.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.1.2",
"pretty-quick": "^3.1.0",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"ts-jest": "^29.0.0",
"ts-loader": "^9.4.2",
"typedoc": "^0.22.16",
"typescript": "^4.6.3",
"webpack": "^5.75.0",
"webpack-cli": "^5.0.1"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"jsdelivr": "dist/umd/supabase.js",
"unpkg": "dist/umd/supabase.js",
"publishConfig": {
"access": "public"
}
}

18
node_modules/@supabase/storage-js/src/StorageClient.ts generated vendored Normal file
View File

@ -0,0 +1,18 @@
import StorageFileApi from './packages/StorageFileApi'
import StorageBucketApi from './packages/StorageBucketApi'
import { Fetch } from './lib/fetch'
export class StorageClient extends StorageBucketApi {
constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
super(url, headers, fetch)
}
/**
* Perform file operation in a bucket.
*
* @param id The bucket id to operate on.
*/
from(id: string): StorageFileApi {
return new StorageFileApi(this.url, this.headers, id, this.fetch)
}
}
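
A usage sketch (not part of this diff): because `StorageClient` extends `StorageBucketApi`, bucket operations hang off the client itself, while `from(id)` returns a `StorageFileApi` scoped to one bucket. The bucket name `avatars` below is illustrative.

```ts
import { StorageClient } from '@supabase/storage-js'

const storage = new StorageClient('https://<project_ref>.supabase.co/storage/v1', {
  apikey: '<service_role>',
  Authorization: 'Bearer <service_role>',
})

async function demo() {
  const { data: buckets } = await storage.listBuckets()        // inherited from StorageBucketApi
  const { data: files } = await storage.from('avatars').list() // StorageFileApi bound to 'avatars'
}
```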

3
node_modules/@supabase/storage-js/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,3 @@
export { StorageClient } from './StorageClient'
export * from './lib/types'
export * from './lib/errors'

2
node_modules/@supabase/storage-js/src/lib/constants.ts generated vendored Normal file
View File

@ -0,0 +1,2 @@
import { version } from './version'
export const DEFAULT_HEADERS = { 'X-Client-Info': `storage-js/${version}` }

40
node_modules/@supabase/storage-js/src/lib/errors.ts generated vendored Normal file
View File

@ -0,0 +1,40 @@
export class StorageError extends Error {
protected __isStorageError = true
constructor(message: string) {
super(message)
this.name = 'StorageError'
}
}
export function isStorageError(error: unknown): error is StorageError {
return typeof error === 'object' && error !== null && '__isStorageError' in error
}
export class StorageApiError extends StorageError {
status: number
constructor(message: string, status: number) {
super(message)
this.name = 'StorageApiError'
this.status = status
}
toJSON() {
return {
name: this.name,
message: this.message,
status: this.status,
}
}
}
export class StorageUnknownError extends StorageError {
originalError: unknown
constructor(message: string, originalError: unknown) {
super(message)
this.name = 'StorageUnknownError'
this.originalError = originalError
}
}
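
The `__isStorageError` marker property lets `isStorageError` narrow values even when `instanceof StorageError` would fail across bundle boundaries. A sketch of the intended narrowing:

```ts
import { isStorageError, StorageApiError } from '@supabase/storage-js'

function report(err: unknown) {
  if (isStorageError(err)) {
    // err is StorageError here; StorageApiError additionally carries an HTTP status
    if (err instanceof StorageApiError) {
      console.error(err.toJSON()) // { name, message, status }
    } else {
      console.error(err.name, err.message)
    }
  } else {
    throw err // not a storage error: let it propagate
  }
}
```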

135
node_modules/@supabase/storage-js/src/lib/fetch.ts generated vendored Normal file
View File

@ -0,0 +1,135 @@
import { StorageApiError, StorageUnknownError } from './errors'
import { resolveResponse } from './helpers'
import { FetchParameters } from './types'
export type Fetch = typeof fetch
export interface FetchOptions {
headers?: {
[key: string]: string
}
noResolveJson?: boolean
}
export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD'
const _getErrorMessage = (err: any): string =>
err.msg || err.message || err.error_description || err.error || JSON.stringify(err)
const handleError = async (
error: unknown,
reject: (reason?: any) => void,
options?: FetchOptions
) => {
const Res = await resolveResponse()
if (error instanceof Res && !options?.noResolveJson) {
error
.json()
.then((err) => {
reject(new StorageApiError(_getErrorMessage(err), error.status || 500))
})
.catch((err) => {
reject(new StorageUnknownError(_getErrorMessage(err), err))
})
} else {
reject(new StorageUnknownError(_getErrorMessage(error), error))
}
}
const _getRequestParams = (
method: RequestMethodType,
options?: FetchOptions,
parameters?: FetchParameters,
body?: object
) => {
const params: { [k: string]: any } = { method, headers: options?.headers || {} }
if (method === 'GET') {
return params
}
params.headers = { 'Content-Type': 'application/json', ...options?.headers }
if (body) {
params.body = JSON.stringify(body)
}
return { ...params, ...parameters }
}
async function _handleRequest(
fetcher: Fetch,
method: RequestMethodType,
url: string,
options?: FetchOptions,
parameters?: FetchParameters,
body?: object
): Promise<any> {
return new Promise((resolve, reject) => {
fetcher(url, _getRequestParams(method, options, parameters, body))
.then((result) => {
if (!result.ok) throw result
if (options?.noResolveJson) return result
return result.json()
})
.then((data) => resolve(data))
.catch((error) => handleError(error, reject, options))
})
}
export async function get(
fetcher: Fetch,
url: string,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(fetcher, 'GET', url, options, parameters)
}
export async function post(
fetcher: Fetch,
url: string,
body: object,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(fetcher, 'POST', url, options, parameters, body)
}
export async function put(
fetcher: Fetch,
url: string,
body: object,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(fetcher, 'PUT', url, options, parameters, body)
}
export async function head(
fetcher: Fetch,
url: string,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(
fetcher,
'HEAD',
url,
{
...options,
noResolveJson: true,
},
parameters
)
}
export async function remove(
fetcher: Fetch,
url: string,
body: object,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(fetcher, 'DELETE', url, options, parameters, body)
}
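
These wrappers funnel every non-2xx response through `handleError`, which yields a `StorageApiError` when the body parses as JSON and a `StorageUnknownError` otherwise. A sketch of direct use (URL and token are placeholders):

```ts
import { get } from './fetch'
import { resolveFetch } from './helpers'
import { StorageApiError } from './errors'

async function listBucketsRaw() {
  const fetcher = resolveFetch() // global fetch, or the @supabase/node-fetch fallback
  try {
    return await get(fetcher, 'https://<project_ref>.supabase.co/storage/v1/bucket', {
      headers: { Authorization: 'Bearer <service_role>' },
    })
  } catch (err) {
    if (err instanceof StorageApiError) console.error(err.status, err.message)
    throw err
  }
}
```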

39
node_modules/@supabase/storage-js/src/lib/helpers.ts generated vendored Normal file
View File

@ -0,0 +1,39 @@
type Fetch = typeof fetch
export const resolveFetch = (customFetch?: Fetch): Fetch => {
let _fetch: Fetch
if (customFetch) {
_fetch = customFetch
} else if (typeof fetch === 'undefined') {
_fetch = (...args) =>
import('@supabase/node-fetch' as any).then(({ default: fetch }) => fetch(...args))
} else {
_fetch = fetch
}
return (...args) => _fetch(...args)
}
export const resolveResponse = async (): Promise<typeof Response> => {
if (typeof Response === 'undefined') {
// @ts-ignore
return (await import('@supabase/node-fetch' as any)).Response
}
return Response
}
export const recursiveToCamel = (item: Record<string, any>): unknown => {
if (Array.isArray(item)) {
return item.map((el) => recursiveToCamel(el))
} else if (typeof item === 'function' || item !== Object(item)) {
return item
}
const result: Record<string, any> = {}
Object.entries(item).forEach(([key, value]) => {
const newKey = key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, ''))
result[newKey] = recursiveToCamel(value)
})
return result
}
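
`recursiveToCamel` leaves primitives and functions untouched, recurses into arrays and plain objects, and rewrites `snake_case` (or `kebab-case`) keys to camelCase; `StorageFileApi.info()` relies on it to return a `Camelize<FileObjectV2>`. For example:

```ts
recursiveToCamel({ bucket_id: 'b1', created_at: '2025-01-01', metadata: { content_type: 'image/png' } })
// => { bucketId: 'b1', createdAt: '2025-01-01', metadata: { contentType: 'image/png' } }
```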

4
node_modules/@supabase/storage-js/src/lib/index.ts generated vendored Normal file
View File

@ -0,0 +1,4 @@
export * from '../packages/StorageBucketApi'
export * from '../packages/StorageFileApi'
export * from './types'
export * from './constants'

149
node_modules/@supabase/storage-js/src/lib/types.ts generated vendored Normal file
View File

@ -0,0 +1,149 @@
export interface Bucket {
id: string
name: string
owner: string
file_size_limit?: number
allowed_mime_types?: string[]
created_at: string
updated_at: string
public: boolean
}
export interface FileObject {
name: string
bucket_id: string
owner: string
id: string
updated_at: string
created_at: string
last_accessed_at: string
metadata: Record<string, any>
buckets: Bucket
}
export interface FileObjectV2 {
id: string
version: string
name: string
bucket_id: string
updated_at: string
created_at: string
last_accessed_at: string
size?: number
cache_control?: string
content_type?: string
etag?: string
last_modified?: string
metadata?: Record<string, any>
}
export interface SortBy {
column?: string
order?: string
}
export interface FileOptions {
/**
* The number of seconds the asset is cached in the browser and in the Supabase CDN. This is set in the `Cache-Control: max-age=<seconds>` header. Defaults to 3600 seconds.
*/
cacheControl?: string
/**
* the `Content-Type` header value. Should be specified if using a `fileBody` that is neither `Blob` nor `File` nor `FormData`, otherwise will default to `text/plain;charset=UTF-8`.
*/
contentType?: string
/**
* When upsert is set to true, the file is overwritten if it exists. When set to false, an error is thrown if the object already exists. Defaults to false.
*/
upsert?: boolean
/**
* The duplex option is a string parameter that enables or disables duplex streaming, allowing for both reading and writing data in the same stream. It can be passed as an option to the fetch() method.
*/
duplex?: string
/**
* The metadata option is an object that allows you to store additional information about the file. This information can be used to filter and search for files. The metadata object can contain any key-value pairs you want to store.
*/
metadata?: Record<string, any>
/**
* Optionally add extra headers
*/
headers?: Record<string, string>
}
export interface DestinationOptions {
destinationBucket?: string
}
export interface SearchOptions {
/**
* The number of files you want to be returned.
*/
limit?: number
/**
* The starting position.
*/
offset?: number
/**
* The column to sort by. Can be any column inside a FileObject.
*/
sortBy?: SortBy
/**
* The search string to filter files by.
*/
search?: string
}
export interface FetchParameters {
/**
* Pass in an AbortController's signal to cancel the request.
*/
signal?: AbortSignal
}
// TODO: need to check for metadata props. The API swagger doesn't include them.
export interface Metadata {
name: string
}
export interface TransformOptions {
/**
* The width of the image in pixels.
*/
width?: number
/**
* The height of the image in pixels.
*/
height?: number
/**
* The resize mode can be cover, contain or fill. Defaults to cover.
* Cover resizes the image to maintain its aspect ratio while filling the entire width and height.
* Contain resizes the image to maintain its aspect ratio while fitting the entire image within the width and height.
* Fill resizes the image to fill the entire width and height. If the object's aspect ratio does not match the width and height, the image will be stretched to fit.
*/
resize?: 'cover' | 'contain' | 'fill'
/**
* Set the quality of the returned image.
* A number from 20 to 100, with 100 being the highest quality.
* Defaults to 80
*/
quality?: number
/**
* Specify the format of the image requested.
*
* When using 'origin' we force the format to be the same as the original image.
* When this option is not passed in, images are optimized to modern image formats like WebP.
*/
format?: 'origin'
}
type CamelCase<S extends string> = S extends `${infer P1}_${infer P2}${infer P3}`
? `${Lowercase<P1>}${Uppercase<P2>}${CamelCase<P3>}`
: S
export type Camelize<T> = {
[K in keyof T as CamelCase<Extract<K, string>>]: T[K]
}
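
`CamelCase` rewrites one underscore segment per recursion step, and `Camelize` applies it to every key, which is how the snake_case wire types above surface as camelCase at the type level. A compile-time sketch:

```ts
type Example = Camelize<Pick<FileObjectV2, 'bucket_id' | 'last_accessed_at'>>
// resolves to: { bucketId: string; lastAccessedAt: string }
```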

2
node_modules/@supabase/storage-js/src/lib/version.ts generated vendored Normal file
View File

@ -0,0 +1,2 @@
// generated by genversion
export const version = '2.7.1'

247
node_modules/@supabase/storage-js/src/packages/StorageBucketApi.ts generated vendored Normal file
View File

@ -0,0 +1,247 @@
import { DEFAULT_HEADERS } from '../lib/constants'
import { isStorageError, StorageError } from '../lib/errors'
import { Fetch, get, post, put, remove } from '../lib/fetch'
import { resolveFetch } from '../lib/helpers'
import { Bucket } from '../lib/types'
export default class StorageBucketApi {
protected url: string
protected headers: { [key: string]: string }
protected fetch: Fetch
constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
this.url = url
this.headers = { ...DEFAULT_HEADERS, ...headers }
this.fetch = resolveFetch(fetch)
}
/**
* Retrieves the details of all Storage buckets within an existing project.
*/
async listBuckets(): Promise<
| {
data: Bucket[]
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await get(this.fetch, `${this.url}/bucket`, { headers: this.headers })
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Retrieves the details of an existing Storage bucket.
*
* @param id The unique identifier of the bucket you would like to retrieve.
*/
async getBucket(
id: string
): Promise<
| {
data: Bucket
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await get(this.fetch, `${this.url}/bucket/${id}`, { headers: this.headers })
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Creates a new Storage bucket
*
* @param id A unique identifier for the bucket you are creating.
* @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations. By default, buckets are private.
* @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
* The global file size limit takes precedence over this value.
* The default value is null, which doesn't set a per bucket file size limit.
* @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
* The default value is null, which allows files with all mime types to be uploaded.
* Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
* @returns newly created bucket id
*/
async createBucket(
id: string,
options: {
public: boolean
fileSizeLimit?: number | string | null
allowedMimeTypes?: string[] | null
} = {
public: false,
}
): Promise<
| {
data: Pick<Bucket, 'name'>
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await post(
this.fetch,
`${this.url}/bucket`,
{
id,
name: id,
public: options.public,
file_size_limit: options.fileSizeLimit,
allowed_mime_types: options.allowedMimeTypes,
},
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Updates a Storage bucket
*
* @param id A unique identifier for the bucket you are updating.
* @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations.
* @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
* The global file size limit takes precedence over this value.
* The default value is null, which doesn't set a per bucket file size limit.
* @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
* The default value is null, which allows files with all mime types to be uploaded.
* Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
*/
async updateBucket(
id: string,
options: {
public: boolean
fileSizeLimit?: number | string | null
allowedMimeTypes?: string[] | null
}
): Promise<
| {
data: { message: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await put(
this.fetch,
`${this.url}/bucket/${id}`,
{
id,
name: id,
public: options.public,
file_size_limit: options.fileSizeLimit,
allowed_mime_types: options.allowedMimeTypes,
},
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Removes all objects inside a single bucket.
*
* @param id The unique identifier of the bucket you would like to empty.
*/
async emptyBucket(
id: string
): Promise<
| {
data: { message: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await post(
this.fetch,
`${this.url}/bucket/${id}/empty`,
{},
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Deletes an existing bucket. A bucket can't be deleted with existing objects inside it.
* You must first `empty()` the bucket.
*
* @param id The unique identifier of the bucket you would like to delete.
*/
async deleteBucket(
id: string
): Promise<
| {
data: { message: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await remove(
this.fetch,
`${this.url}/bucket/${id}`,
{},
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
}
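
Every method above follows the same contract: known `StorageError`s are caught and returned as `{ data: null, error }`, while anything unexpected is re-thrown, so callers branch on `error` instead of wrapping calls in try/catch. A consumption sketch, reusing the `storageClient` from the README:

```ts
const { data, error } = await storageClient.getBucket('test_bucket')
if (error) {
  console.error('getBucket failed:', error.message) // data is null on this branch
} else {
  console.log(data.name, data.public, data.created_at) // data is a Bucket here
}
```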

834
node_modules/@supabase/storage-js/src/packages/StorageFileApi.ts generated vendored Normal file
View File

@ -0,0 +1,834 @@
import { isStorageError, StorageError, StorageUnknownError } from '../lib/errors'
import { Fetch, get, head, post, remove } from '../lib/fetch'
import { recursiveToCamel, resolveFetch } from '../lib/helpers'
import {
FileObject,
FileOptions,
SearchOptions,
FetchParameters,
TransformOptions,
DestinationOptions,
FileObjectV2,
Camelize,
} from '../lib/types'
const DEFAULT_SEARCH_OPTIONS = {
limit: 100,
offset: 0,
sortBy: {
column: 'name',
order: 'asc',
},
}
const DEFAULT_FILE_OPTIONS: FileOptions = {
cacheControl: '3600',
contentType: 'text/plain;charset=UTF-8',
upsert: false,
}
type FileBody =
| ArrayBuffer
| ArrayBufferView
| Blob
| Buffer
| File
| FormData
| NodeJS.ReadableStream
| ReadableStream<Uint8Array>
| URLSearchParams
| string
export default class StorageFileApi {
protected url: string
protected headers: { [key: string]: string }
protected bucketId?: string
protected fetch: Fetch
constructor(
url: string,
headers: { [key: string]: string } = {},
bucketId?: string,
fetch?: Fetch
) {
this.url = url
this.headers = headers
this.bucketId = bucketId
this.fetch = resolveFetch(fetch)
}
/**
* Uploads a file to an existing bucket or replaces an existing file at the specified path with a new one.
*
* @param method HTTP method.
* @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
* @param fileBody The body of the file to be stored in the bucket.
*/
private async uploadOrUpdate(
method: 'POST' | 'PUT',
path: string,
fileBody: FileBody,
fileOptions?: FileOptions
): Promise<
| {
data: { id: string; path: string; fullPath: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
let body
const options = { ...DEFAULT_FILE_OPTIONS, ...fileOptions }
let headers: Record<string, string> = {
...this.headers,
...(method === 'POST' && { 'x-upsert': String(options.upsert as boolean) }),
}
const metadata = options.metadata
if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {
body = new FormData()
body.append('cacheControl', options.cacheControl as string)
if (metadata) {
body.append('metadata', this.encodeMetadata(metadata))
}
body.append('', fileBody)
} else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {
body = fileBody
body.append('cacheControl', options.cacheControl as string)
if (metadata) {
body.append('metadata', this.encodeMetadata(metadata))
}
} else {
body = fileBody
headers['cache-control'] = `max-age=${options.cacheControl}`
headers['content-type'] = options.contentType as string
if (metadata) {
headers['x-metadata'] = this.toBase64(this.encodeMetadata(metadata))
}
}
if (fileOptions?.headers) {
headers = { ...headers, ...fileOptions.headers }
}
const cleanPath = this._removeEmptyFolders(path)
const _path = this._getFinalPath(cleanPath)
const res = await this.fetch(`${this.url}/object/${_path}`, {
method,
body: body as BodyInit,
headers,
...(options?.duplex ? { duplex: options.duplex } : {}),
})
const data = await res.json()
if (res.ok) {
return {
data: { path: cleanPath, id: data.Id, fullPath: data.Key },
error: null,
}
} else {
const error = data
return { data: null, error }
}
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Uploads a file to an existing bucket.
*
* @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
* @param fileBody The body of the file to be stored in the bucket.
*/
async upload(
path: string,
fileBody: FileBody,
fileOptions?: FileOptions
): Promise<
| {
data: { id: string; path: string; fullPath: string }
error: null
}
| {
data: null
error: StorageError
}
> {
return this.uploadOrUpdate('POST', path, fileBody, fileOptions)
}
/**
* Upload a file with a token generated from `createSignedUploadUrl`.
* @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
* @param token The token generated from `createSignedUploadUrl`
* @param fileBody The body of the file to be stored in the bucket.
*/
async uploadToSignedUrl(
path: string,
token: string,
fileBody: FileBody,
fileOptions?: FileOptions
) {
const cleanPath = this._removeEmptyFolders(path)
const _path = this._getFinalPath(cleanPath)
const url = new URL(this.url + `/object/upload/sign/${_path}`)
url.searchParams.set('token', token)
try {
let body
const options = { upsert: DEFAULT_FILE_OPTIONS.upsert, ...fileOptions }
const headers: Record<string, string> = {
...this.headers,
...{ 'x-upsert': String(options.upsert as boolean) },
}
if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {
body = new FormData()
body.append('cacheControl', options.cacheControl as string)
body.append('', fileBody)
} else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {
body = fileBody
body.append('cacheControl', options.cacheControl as string)
} else {
body = fileBody
headers['cache-control'] = `max-age=${options.cacheControl}`
headers['content-type'] = options.contentType as string
}
const res = await this.fetch(url.toString(), {
method: 'PUT',
body: body as BodyInit,
headers,
})
const data = await res.json()
if (res.ok) {
return {
data: { path: cleanPath, fullPath: data.Key },
error: null,
}
} else {
const error = data
return { data: null, error }
}
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Creates a signed upload URL.
* Signed upload URLs can be used to upload files to the bucket without further authentication.
* They are valid for 2 hours.
* @param path The file path, including the current file name. For example `folder/image.png`.
* @param options.upsert If set to true, allows the file to be overwritten if it already exists.
*/
async createSignedUploadUrl(
path: string,
options?: { upsert: boolean }
): Promise<
| {
data: { signedUrl: string; token: string; path: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
let _path = this._getFinalPath(path)
const headers = { ...this.headers }
if (options?.upsert) {
headers['x-upsert'] = 'true'
}
const data = await post(
this.fetch,
`${this.url}/object/upload/sign/${_path}`,
{},
{ headers }
)
const url = new URL(this.url + data.url)
const token = url.searchParams.get('token')
if (!token) {
throw new StorageError('No token returned by API')
}
return { data: { signedUrl: url.toString(), path, token }, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Replaces an existing file at the specified path with a new one.
*
* @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to update.
* @param fileBody The body of the file to be stored in the bucket.
*/
async update(
path: string,
fileBody:
| ArrayBuffer
| ArrayBufferView
| Blob
| Buffer
| File
| FormData
| NodeJS.ReadableStream
| ReadableStream<Uint8Array>
| URLSearchParams
| string,
fileOptions?: FileOptions
): Promise<
| {
data: { id: string; path: string; fullPath: string }
error: null
}
| {
data: null
error: StorageError
}
> {
return this.uploadOrUpdate('PUT', path, fileBody, fileOptions)
}
/**
* Moves an existing file to a new path in the same bucket.
*
* @param fromPath The original file path, including the current file name. For example `folder/image.png`.
* @param toPath The new file path, including the new file name. For example `folder/image-new.png`.
* @param options The destination options.
*/
async move(
fromPath: string,
toPath: string,
options?: DestinationOptions
): Promise<
| {
data: { message: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await post(
this.fetch,
`${this.url}/object/move`,
{
bucketId: this.bucketId,
sourceKey: fromPath,
destinationKey: toPath,
destinationBucket: options?.destinationBucket,
},
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Copies an existing file to a new path in the same bucket.
*
* @param fromPath The original file path, including the current file name. For example `folder/image.png`.
* @param toPath The new file path, including the new file name. For example `folder/image-copy.png`.
* @param options The destination options.
*/
async copy(
fromPath: string,
toPath: string,
options?: DestinationOptions
): Promise<
| {
data: { path: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await post(
this.fetch,
`${this.url}/object/copy`,
{
bucketId: this.bucketId,
sourceKey: fromPath,
destinationKey: toPath,
destinationBucket: options?.destinationBucket,
},
{ headers: this.headers }
)
return { data: { path: data.Key }, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Creates a signed URL. Use a signed URL to share a file for a fixed amount of time.
*
* @param path The file path, including the current file name. For example `folder/image.png`.
* @param expiresIn The number of seconds until the signed URL expires. For example, `60` for a URL which is valid for one minute.
* @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
* @param options.transform Transform the asset before serving it to the client.
*/
async createSignedUrl(
path: string,
expiresIn: number,
options?: { download?: string | boolean; transform?: TransformOptions }
): Promise<
| {
data: { signedUrl: string }
error: null
}
| {
data: null
error: StorageError
}
> {
try {
let _path = this._getFinalPath(path)
let data = await post(
this.fetch,
`${this.url}/object/sign/${_path}`,
{ expiresIn, ...(options?.transform ? { transform: options.transform } : {}) },
{ headers: this.headers }
)
const downloadQueryParam = options?.download
? `&download=${options.download === true ? '' : options.download}`
: ''
const signedUrl = encodeURI(`${this.url}${data.signedURL}${downloadQueryParam}`)
data = { signedUrl }
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Creates multiple signed URLs. Use a signed URL to share a file for a fixed amount of time.
*
* @param paths The file paths to be downloaded, including the current file names. For example `['folder/image.png', 'folder2/image2.png']`.
* @param expiresIn The number of seconds until the signed URLs expire. For example, `60` for URLs which are valid for one minute.
* @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
*/
async createSignedUrls(
paths: string[],
expiresIn: number,
options?: { download: string | boolean }
): Promise<
| {
data: { error: string | null; path: string | null; signedUrl: string }[]
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await post(
this.fetch,
`${this.url}/object/sign/${this.bucketId}`,
{ expiresIn, paths },
{ headers: this.headers }
)
const downloadQueryParam = options?.download
? `&download=${options.download === true ? '' : options.download}`
: ''
return {
data: data.map((datum: { signedURL: string }) => ({
...datum,
signedUrl: datum.signedURL
? encodeURI(`${this.url}${datum.signedURL}${downloadQueryParam}`)
: null,
})),
error: null,
}
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Downloads a file from a private bucket. For public buckets, make a request to the URL returned from `getPublicUrl` instead.
*
* @param path The full path and file name of the file to be downloaded. For example `folder/image.png`.
* @param options.transform Transform the asset before serving it to the client.
*/
async download(
path: string,
options?: { transform?: TransformOptions }
): Promise<
| {
data: Blob
error: null
}
| {
data: null
error: StorageError
}
> {
const wantsTransformation = typeof options?.transform !== 'undefined'
const renderPath = wantsTransformation ? 'render/image/authenticated' : 'object'
const transformationQuery = this.transformOptsToQueryString(options?.transform || {})
const queryString = transformationQuery ? `?${transformationQuery}` : ''
try {
const _path = this._getFinalPath(path)
const res = await get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString}`, {
headers: this.headers,
noResolveJson: true,
})
const data = await res.blob()
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Retrieves the details of an existing file.
* @param path
*/
async info(
path: string
): Promise<
| {
data: Camelize<FileObjectV2>
error: null
}
| {
data: null
error: StorageError
}
> {
const _path = this._getFinalPath(path)
try {
const data = await get(this.fetch, `${this.url}/object/info/${_path}`, {
headers: this.headers,
})
return { data: recursiveToCamel(data) as Camelize<FileObjectV2>, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Checks the existence of a file.
* @param path
*/
async exists(
path: string
): Promise<
| {
data: boolean
error: null
}
| {
data: boolean
error: StorageError
}
> {
const _path = this._getFinalPath(path)
try {
await head(this.fetch, `${this.url}/object/${_path}`, {
headers: this.headers,
})
return { data: true, error: null }
} catch (error) {
if (isStorageError(error) && error instanceof StorageUnknownError) {
const originalError = (error.originalError as unknown) as { status: number }
if ([400, 404].includes(originalError?.status)) {
return { data: false, error }
}
}
throw error
}
}
/**
* A simple convenience function to get the URL for an asset in a public bucket. If you do not want to use this function, you can construct the public URL by concatenating the bucket URL with the path to the asset.
* This function does not verify if the bucket is public. If a public URL is created for a bucket which is not public, you will not be able to download the asset.
*
* @param path The path and name of the file to generate the public URL for. For example `folder/image.png`.
* @param options.download Triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
* @param options.transform Transform the asset before serving it to the client.
*/
getPublicUrl(
path: string,
options?: { download?: string | boolean; transform?: TransformOptions }
): { data: { publicUrl: string } } {
const _path = this._getFinalPath(path)
const _queryString = []
const downloadQueryParam = options?.download
? `download=${options.download === true ? '' : options.download}`
: ''
if (downloadQueryParam !== '') {
_queryString.push(downloadQueryParam)
}
const wantsTransformation = typeof options?.transform !== 'undefined'
const renderPath = wantsTransformation ? 'render/image' : 'object'
const transformationQuery = this.transformOptsToQueryString(options?.transform || {})
if (transformationQuery !== '') {
_queryString.push(transformationQuery)
}
let queryString = _queryString.join('&')
if (queryString !== '') {
queryString = `?${queryString}`
}
return {
data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}${queryString}`) },
}
}
/**
* Deletes files within the same bucket
*
* @param paths An array of files to delete, including the path and file name. For example [`'folder/image.png'`].
*/
async remove(
paths: string[]
): Promise<
| {
data: FileObject[]
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const data = await remove(
this.fetch,
`${this.url}/object/${this.bucketId}`,
{ prefixes: paths },
{ headers: this.headers }
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
/**
* Get file metadata
* @param id the file id to retrieve metadata
*/
// async getMetadata(
// id: string
// ): Promise<
// | {
// data: Metadata
// error: null
// }
// | {
// data: null
// error: StorageError
// }
// > {
// try {
// const data = await get(this.fetch, `${this.url}/metadata/${id}`, { headers: this.headers })
// return { data, error: null }
// } catch (error) {
// if (isStorageError(error)) {
// return { data: null, error }
// }
// throw error
// }
// }
/**
* Update file metadata
* @param id the file id to update metadata
* @param meta the new file metadata
*/
// async updateMetadata(
// id: string,
// meta: Metadata
// ): Promise<
// | {
// data: Metadata
// error: null
// }
// | {
// data: null
// error: StorageError
// }
// > {
// try {
// const data = await post(
// this.fetch,
// `${this.url}/metadata/${id}`,
// { ...meta },
// { headers: this.headers }
// )
// return { data, error: null }
// } catch (error) {
// if (isStorageError(error)) {
// return { data: null, error }
// }
// throw error
// }
// }
/**
* Lists all the files within a bucket.
* @param path The folder path.
*/
async list(
path?: string,
options?: SearchOptions,
parameters?: FetchParameters
): Promise<
| {
data: FileObject[]
error: null
}
| {
data: null
error: StorageError
}
> {
try {
const body = { ...DEFAULT_SEARCH_OPTIONS, ...options, prefix: path || '' }
const data = await post(
this.fetch,
`${this.url}/object/list/${this.bucketId}`,
body,
{ headers: this.headers },
parameters
)
return { data, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}
throw error
}
}
protected encodeMetadata(metadata: Record<string, any>) {
return JSON.stringify(metadata)
}
toBase64(data: string) {
if (typeof Buffer !== 'undefined') {
return Buffer.from(data).toString('base64')
}
return btoa(data)
}
private _getFinalPath(path: string) {
return `${this.bucketId}/${path}`
}
private _removeEmptyFolders(path: string) {
return path.replace(/^\/|\/$/g, '').replace(/\/+/g, '/')
}
private transformOptsToQueryString(transform: TransformOptions) {
const params = []
if (transform.width) {
params.push(`width=${transform.width}`)
}
if (transform.height) {
params.push(`height=${transform.height}`)
}
if (transform.resize) {
params.push(`resize=${transform.resize}`)
}
if (transform.format) {
params.push(`format=${transform.format}`)
}
if (transform.quality) {
params.push(`quality=${transform.quality}`)
}
return params.join('&')
}
}
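
One flow worth spelling out: `createSignedUploadUrl` (tokens are valid for 2 hours) pairs with `uploadToSignedUrl`, so a trusted server can mint a token and an untrusted client can upload without any other credentials. A sketch, with the bucket and path names illustrative:

```ts
// Trusted side: mint the token for an exact path.
const { data: signed, error } = await storageClient
  .from('avatars')
  .createSignedUploadUrl('user-1/avatar.png')
if (error) throw error

// Untrusted side: upload against that path with only the token.
const { data: uploaded, error: uploadError } = await storageClient
  .from('avatars')
  .uploadToSignedUrl(signed.path, signed.token, new Blob(['...image bytes...']))
```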

21
node_modules/@supabase/supabase-js/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

94
node_modules/@supabase/supabase-js/README.md generated vendored Normal file
View File

@ -0,0 +1,94 @@
# `supabase-js` - Isomorphic JavaScript Client for Supabase.
- **Documentation:** https://supabase.com/docs/reference/javascript/start
- TypeDoc: https://supabase.github.io/supabase-js/v2/
## Usage
First of all, you need to install the library:
```sh
npm install @supabase/supabase-js
```
Then you're able to import the library and establish the connection with the database:
```js
import { createClient } from '@supabase/supabase-js'
// Create a single supabase client for interacting with your database
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
```
### UMD
You can use plain `<script>`s to import supabase-js from CDNs, like:
```html
<script src="https://cdn.jsdelivr.net/npm/@supabase/supabase-js@2"></script>
```
or even:
```html
<script src="https://unpkg.com/@supabase/supabase-js@2"></script>
```
Then you can use it from a global `supabase` variable:
```html
<script>
const { createClient } = supabase
const _supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
console.log('Supabase Instance: ', _supabase)
// ...
</script>
```
### ESM
You can use `<script type="module">` to import supabase-js from CDNs, like:
```html
<script type="module">
import { createClient } from 'https://cdn.jsdelivr.net/npm/@supabase/supabase-js/+esm'
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
console.log('Supabase Instance: ', supabase)
// ...
</script>
```
### Deno
You can use supabase-js in the Deno runtime via [JSR](https://jsr.io/@supabase/supabase-js):
```js
import { createClient } from 'jsr:@supabase/supabase-js@2'
```
### Custom `fetch` implementation
`supabase-js` uses the [`@supabase/node-fetch`](https://www.npmjs.com/package/@supabase/node-fetch) library to make HTTP requests when no global `fetch` is available, but an alternative `fetch` implementation can be provided as an option. This is most useful in environments where the built-in fallback is not compatible, for instance Cloudflare Workers:
```js
import { createClient } from '@supabase/supabase-js'
// Provide a custom `fetch` implementation as an option
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key', {
global: {
fetch: (...args) => fetch(...args),
},
})
```
## Sponsors
We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone.
[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase)
## Badges
[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-js/badge.svg?branch=master)](https://coveralls.io/github/supabase/supabase-js?branch=master)

77
node_modules/@supabase/supabase-js/package.json generated vendored Normal file
View File

@ -0,0 +1,77 @@
{
"name": "@supabase/supabase-js",
"version": "2.49.1",
"description": "Isomorphic Javascript client for Supabase",
"keywords": [
"javascript",
"typescript",
"supabase"
],
"homepage": "https://github.com/supabase/supabase-js",
"bugs": "https://github.com/supabase/supabase-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/main/index.js",
"module": "dist/module/index.js",
"types": "dist/module/index.d.ts",
"sideEffects": false,
"repository": "supabase/supabase-js",
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\"",
"build": "run-s clean format build:*",
"build:main": "tsc -p tsconfig.json",
"build:module": "tsc -p tsconfig.module.json",
"build:umd": "webpack",
"types-generate": "dts-gen -m '@supabase/supabase-js' -s",
"test": "run-s test:types test:run",
"test:run": "jest --runInBand",
"test:coverage": "jest --runInBand --coverage",
"test:db": "cd infra/db && docker-compose down && docker-compose up -d && sleep 5",
"test:watch": "jest --watch --verbose false --silent false",
"test:clean": "cd infra/db && docker-compose down",
"test:types": "run-s build:module && tsd --files test/*.test-d.ts",
"docs": "typedoc --entryPoints src/index.ts --out docs/v2 --includes src/**/*.ts",
"docs:json": "typedoc --entryPoints src/index.ts --includes src/**/*.ts --json docs/v2/spec.json --excludeExternals",
"serve:coverage": "npm run test:coverage && serve test/coverage"
},
"dependencies": {
"@supabase/auth-js": "2.68.0",
"@supabase/functions-js": "2.4.4",
"@supabase/node-fetch": "2.6.15",
"@supabase/postgrest-js": "1.19.2",
"@supabase/realtime-js": "2.11.2",
"@supabase/storage-js": "2.7.1"
},
"devDependencies": {
"@sebbo2002/semantic-release-jsr": "^1.0.0",
"@types/jest": "^29.2.5",
"husky": "^4.3.0",
"jest": "^29.3.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.5.1",
"pretty-quick": "^3.1.3",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"serve": "^14.2.1",
"ts-jest": "^29.0.5",
"ts-loader": "^8.0.11",
"ts-node": "^10.9.1",
"tsd": "^0.30.4",
"typedoc": "^0.22.16",
"typescript": "^4.5.5",
"webpack": "^5.69.1",
"webpack-cli": "^4.9.2"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"jsdelivr": "dist/umd/supabase.js",
"unpkg": "dist/umd/supabase.js"
}

345
node_modules/@supabase/supabase-js/src/SupabaseClient.ts generated vendored Normal file
View File

@ -0,0 +1,345 @@
import { FunctionsClient } from '@supabase/functions-js'
import { AuthChangeEvent } from '@supabase/auth-js'
import {
PostgrestClient,
PostgrestFilterBuilder,
PostgrestQueryBuilder,
} from '@supabase/postgrest-js'
import {
RealtimeChannel,
RealtimeChannelOptions,
RealtimeClient,
RealtimeClientOptions,
} from '@supabase/realtime-js'
import { StorageClient as SupabaseStorageClient } from '@supabase/storage-js'
import {
DEFAULT_GLOBAL_OPTIONS,
DEFAULT_DB_OPTIONS,
DEFAULT_AUTH_OPTIONS,
DEFAULT_REALTIME_OPTIONS,
} from './lib/constants'
import { fetchWithAuth } from './lib/fetch'
import { stripTrailingSlash, applySettingDefaults } from './lib/helpers'
import { SupabaseAuthClient } from './lib/SupabaseAuthClient'
import { Fetch, GenericSchema, SupabaseClientOptions, SupabaseAuthClientOptions } from './lib/types'
/**
* Supabase Client.
*
* An isomorphic Javascript client for interacting with Postgres.
*/
export default class SupabaseClient<
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database,
Schema extends GenericSchema = Database[SchemaName] extends GenericSchema
? Database[SchemaName]
: any
> {
/**
* Supabase Auth allows you to create and manage user sessions for access to data that is secured by access policies.
*/
auth: SupabaseAuthClient
realtime: RealtimeClient
protected realtimeUrl: string
protected authUrl: string
protected storageUrl: string
protected functionsUrl: string
protected rest: PostgrestClient<Database, SchemaName, Schema>
protected storageKey: string
protected fetch?: Fetch
protected changedAccessToken?: string
protected accessToken?: () => Promise<string | null>
protected headers: Record<string, string>
/**
* Create a new client for use in the browser.
* @param supabaseUrl The unique Supabase URL which is supplied when you create a new project in your project dashboard.
* @param supabaseKey The unique Supabase Key which is supplied when you create a new project in your project dashboard.
* @param options.db.schema You can switch in between schemas. The schema needs to be on the list of exposed schemas inside Supabase.
* @param options.auth.autoRefreshToken Set to "true" if you want to automatically refresh the token before it expires.
* @param options.auth.persistSession Set to "true" if you want to automatically save the user session into local storage.
* @param options.auth.detectSessionInUrl Set to "true" if you want to automatically detect OAuth grants in the URL and sign in the user.
* @param options.realtime Options passed along to realtime-js constructor.
* @param options.global.fetch A custom fetch implementation.
* @param options.global.headers Any additional headers to send with each network request.
*/
constructor(
protected supabaseUrl: string,
protected supabaseKey: string,
options?: SupabaseClientOptions<SchemaName>
) {
if (!supabaseUrl) throw new Error('supabaseUrl is required.')
if (!supabaseKey) throw new Error('supabaseKey is required.')
const _supabaseUrl = stripTrailingSlash(supabaseUrl)
this.realtimeUrl = `${_supabaseUrl}/realtime/v1`.replace(/^http/i, 'ws')
this.authUrl = `${_supabaseUrl}/auth/v1`
this.storageUrl = `${_supabaseUrl}/storage/v1`
this.functionsUrl = `${_supabaseUrl}/functions/v1`
// default storage key uses the supabase project ref as a namespace
const defaultStorageKey = `sb-${new URL(this.authUrl).hostname.split('.')[0]}-auth-token`
const DEFAULTS = {
db: DEFAULT_DB_OPTIONS,
realtime: DEFAULT_REALTIME_OPTIONS,
auth: { ...DEFAULT_AUTH_OPTIONS, storageKey: defaultStorageKey },
global: DEFAULT_GLOBAL_OPTIONS,
}
const settings = applySettingDefaults(options ?? {}, DEFAULTS)
this.storageKey = settings.auth.storageKey ?? ''
this.headers = settings.global.headers ?? {}
if (!settings.accessToken) {
this.auth = this._initSupabaseAuthClient(
settings.auth ?? {},
this.headers,
settings.global.fetch
)
} else {
this.accessToken = settings.accessToken
this.auth = new Proxy<SupabaseAuthClient>({} as any, {
get: (_, prop) => {
throw new Error(
`@supabase/supabase-js: Supabase Client is configured with the accessToken option, accessing supabase.auth.${String(
prop
)} is not possible`
)
},
})
}
this.fetch = fetchWithAuth(supabaseKey, this._getAccessToken.bind(this), settings.global.fetch)
this.realtime = this._initRealtimeClient({
headers: this.headers,
accessToken: this._getAccessToken.bind(this),
...settings.realtime,
})
this.rest = new PostgrestClient(`${_supabaseUrl}/rest/v1`, {
headers: this.headers,
schema: settings.db.schema,
fetch: this.fetch,
})
if (!settings.accessToken) {
this._listenForAuthEvents()
}
}
/**
* Supabase Functions allows you to deploy and invoke edge functions.
*/
get functions(): FunctionsClient {
return new FunctionsClient(this.functionsUrl, {
headers: this.headers,
customFetch: this.fetch,
})
}
/**
* Supabase Storage allows you to manage user-generated content, such as photos or videos.
*/
get storage(): SupabaseStorageClient {
return new SupabaseStorageClient(this.storageUrl, this.headers, this.fetch)
}
// NOTE: signatures must be kept in sync with PostgrestClient.from
from<
TableName extends string & keyof Schema['Tables'],
Table extends Schema['Tables'][TableName]
>(relation: TableName): PostgrestQueryBuilder<Schema, Table, TableName>
from<ViewName extends string & keyof Schema['Views'], View extends Schema['Views'][ViewName]>(
relation: ViewName
): PostgrestQueryBuilder<Schema, View, ViewName>
/**
* Perform a query on a table or a view.
*
* @param relation - The table or view name to query
*/
from(relation: string): PostgrestQueryBuilder<Schema, any, any> {
return this.rest.from(relation)
}
// NOTE: signatures must be kept in sync with PostgrestClient.schema
/**
* Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
*/
schema<DynamicSchema extends string & keyof Database>(
schema: DynamicSchema
): PostgrestClient<
Database,
DynamicSchema,
Database[DynamicSchema] extends GenericSchema ? Database[DynamicSchema] : any
> {
return this.rest.schema<DynamicSchema>(schema)
}
// NOTE: signatures must be kept in sync with PostgrestClient.rpc
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
rpc<FnName extends string & keyof Schema['Functions'], Fn extends Schema['Functions'][FnName]>(
fn: FnName,
args: Fn['Args'] = {},
options: {
head?: boolean
get?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<
Schema,
Fn['Returns'] extends any[]
? Fn['Returns'][number] extends Record<string, unknown>
? Fn['Returns'][number]
: never
: never,
Fn['Returns'],
FnName,
null
> {
return this.rest.rpc(fn, args, options)
}
/**
* Creates a Realtime channel with Broadcast, Presence, and Postgres Changes.
*
* @param {string} name - The name of the Realtime channel.
* @param {Object} opts - The options to pass to the Realtime channel.
*
*/
channel(name: string, opts: RealtimeChannelOptions = { config: {} }): RealtimeChannel {
return this.realtime.channel(name, opts)
}
/**
* Returns all Realtime channels.
*/
getChannels(): RealtimeChannel[] {
return this.realtime.getChannels()
}
/**
* Unsubscribes and removes Realtime channel from Realtime client.
*
* @param {RealtimeChannel} channel - The Realtime channel to unsubscribe and remove.
*
*/
removeChannel(channel: RealtimeChannel): Promise<'ok' | 'timed out' | 'error'> {
return this.realtime.removeChannel(channel)
}
/**
* Unsubscribes and removes all Realtime channels from Realtime client.
*/
removeAllChannels(): Promise<('ok' | 'timed out' | 'error')[]> {
return this.realtime.removeAllChannels()
}
private async _getAccessToken() {
if (this.accessToken) {
return await this.accessToken()
}
const { data } = await this.auth.getSession()
return data.session?.access_token ?? null
}
private _initSupabaseAuthClient(
{
autoRefreshToken,
persistSession,
detectSessionInUrl,
storage,
storageKey,
flowType,
lock,
debug,
}: SupabaseAuthClientOptions,
headers?: Record<string, string>,
fetch?: Fetch
) {
const authHeaders = {
Authorization: `Bearer ${this.supabaseKey}`,
apikey: `${this.supabaseKey}`,
}
return new SupabaseAuthClient({
url: this.authUrl,
headers: { ...authHeaders, ...headers },
storageKey: storageKey,
autoRefreshToken,
persistSession,
detectSessionInUrl,
storage,
flowType,
lock,
debug,
fetch,
// auth checks if there is a custom authorization header using this flag
// so it knows whether to return an error when getUser is called with no session
hasCustomAuthorizationHeader: 'Authorization' in this.headers,
})
}
private _initRealtimeClient(options: RealtimeClientOptions) {
return new RealtimeClient(this.realtimeUrl, {
...options,
params: { ...{ apikey: this.supabaseKey }, ...options?.params },
})
}
private _listenForAuthEvents() {
let data = this.auth.onAuthStateChange((event, session) => {
this._handleTokenChanged(event, 'CLIENT', session?.access_token)
})
return data
}
private _handleTokenChanged(
event: AuthChangeEvent,
source: 'CLIENT' | 'STORAGE',
token?: string
) {
if (
(event === 'TOKEN_REFRESHED' || event === 'SIGNED_IN') &&
this.changedAccessToken !== token
) {
this.changedAccessToken = token
} else if (event === 'SIGNED_OUT') {
this.realtime.setAuth()
if (source == 'STORAGE') this.auth.signOut()
this.changedAccessToken = undefined
}
}
}
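
One behavior worth calling out: when the `accessToken` option is set, `supabase.auth` is replaced by a Proxy that throws on any property access, because session management has been delegated to the caller. A sketch (the token helper and the `boletos` table are hypothetical):

```ts
import { createClient } from '@supabase/supabase-js'

declare function getTokenFromMyAuthProvider(): Promise<string> // hypothetical external auth

const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key', {
  accessToken: () => getTokenFromMyAuthProvider(),
})

await supabase.from('boletos').select('*') // requests carry the custom bearer token
// supabase.auth.getSession()              // would throw: auth is disabled with accessToken
```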

41
node_modules/@supabase/supabase-js/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,41 @@
import SupabaseClient from './SupabaseClient'
import type { GenericSchema, SupabaseClientOptions } from './lib/types'
export * from '@supabase/auth-js'
export type { User as AuthUser, Session as AuthSession } from '@supabase/auth-js'
export {
type PostgrestResponse,
type PostgrestSingleResponse,
type PostgrestMaybeSingleResponse,
PostgrestError,
} from '@supabase/postgrest-js'
export {
FunctionsHttpError,
FunctionsFetchError,
FunctionsRelayError,
FunctionsError,
type FunctionInvokeOptions,
FunctionRegion,
} from '@supabase/functions-js'
export * from '@supabase/realtime-js'
export { default as SupabaseClient } from './SupabaseClient'
export type { SupabaseClientOptions, QueryResult, QueryData, QueryError } from './lib/types'
/**
* Creates a new Supabase Client.
*/
export const createClient = <
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database,
Schema extends GenericSchema = Database[SchemaName] extends GenericSchema
? Database[SchemaName]
: any
>(
supabaseUrl: string,
supabaseKey: string,
options?: SupabaseClientOptions<SchemaName>
): SupabaseClient<Database, SchemaName, Schema> => {
return new SupabaseClient<Database, SchemaName, Schema>(supabaseUrl, supabaseKey, options)
}
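
The generics default `SchemaName` to `'public'` whenever the supplied `Database` type has a `public` key, so a generated schema type flows through `from()` untouched. A hedged sketch with a hypothetical hand-written schema (real projects generate this type with the Supabase CLI):

```ts
type Database = {
  public: {
    Tables: {
      boletos: {
        Row: { id: number; precio: number }
        Insert: { precio: number }
        Update: { precio?: number }
        Relationships: []
      }
    }
    Views: {}
    Functions: {}
  }
}

const client = createClient<Database>('https://xyzcompany.supabase.co', 'public-anon-key')
const { data } = await client.from('boletos').select('*') // rows typed as { id: number; precio: number }
```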

8
node_modules/@supabase/supabase-js/src/lib/SupabaseAuthClient.ts generated vendored Normal file
View File

@ -0,0 +1,8 @@
import { AuthClient } from '@supabase/auth-js'
import { SupabaseAuthClientOptions } from './types'
export class SupabaseAuthClient extends AuthClient {
constructor(options: SupabaseAuthClientOptions) {
super(options)
}
}

35
node_modules/@supabase/supabase-js/src/lib/constants.ts generated vendored Normal file
View File

@ -0,0 +1,35 @@
// constants.ts
import { RealtimeClientOptions } from '@supabase/realtime-js'
import { SupabaseAuthClientOptions } from './types'
import { version } from './version'
let JS_ENV = ''
// @ts-ignore
if (typeof Deno !== 'undefined') {
JS_ENV = 'deno'
} else if (typeof document !== 'undefined') {
JS_ENV = 'web'
} else if (typeof navigator !== 'undefined' && navigator.product === 'ReactNative') {
JS_ENV = 'react-native'
} else {
JS_ENV = 'node'
}
export const DEFAULT_HEADERS = { 'X-Client-Info': `supabase-js-${JS_ENV}/${version}` }
export const DEFAULT_GLOBAL_OPTIONS = {
headers: DEFAULT_HEADERS,
}
export const DEFAULT_DB_OPTIONS = {
schema: 'public',
}
export const DEFAULT_AUTH_OPTIONS: SupabaseAuthClientOptions = {
autoRefreshToken: true,
persistSession: true,
detectSessionInUrl: true,
flowType: 'implicit',
}
export const DEFAULT_REALTIME_OPTIONS: RealtimeClientOptions = {}

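For illustration, in a plain Node.js process the detection above selects JS_ENV = 'node', so with the vendored version (see version.ts below) the default header works out to:

// JS_ENV === 'node', version === '2.49.1', therefore:
// DEFAULT_HEADERS = { 'X-Client-Info': 'supabase-js-node/2.49.1' }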
48
node_modules/@supabase/supabase-js/src/lib/fetch.ts generated vendored Normal file

@ -0,0 +1,48 @@
// @ts-ignore
import nodeFetch, { Headers as NodeFetchHeaders } from '@supabase/node-fetch'
type Fetch = typeof fetch
export const resolveFetch = (customFetch?: Fetch): Fetch => {
let _fetch: Fetch
if (customFetch) {
_fetch = customFetch
} else if (typeof fetch === 'undefined') {
_fetch = nodeFetch as unknown as Fetch
} else {
_fetch = fetch
}
return (...args: Parameters<Fetch>) => _fetch(...args)
}
export const resolveHeadersConstructor = () => {
if (typeof Headers === 'undefined') {
return NodeFetchHeaders
}
return Headers
}
export const fetchWithAuth = (
supabaseKey: string,
getAccessToken: () => Promise<string | null>,
customFetch?: Fetch
): Fetch => {
const fetch = resolveFetch(customFetch)
const HeadersConstructor = resolveHeadersConstructor()
return async (input, init) => {
const accessToken = (await getAccessToken()) ?? supabaseKey
let headers = new HeadersConstructor(init?.headers)
if (!headers.has('apikey')) {
headers.set('apikey', supabaseKey)
}
if (!headers.has('Authorization')) {
headers.set('Authorization', `Bearer ${accessToken}`)
}
return fetch(input, { ...init, headers })
}
}

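A sketch of `fetchWithAuth` in use; the key, token, and URL are placeholders. Because of the `has()` guards above, caller-supplied `apikey`/`Authorization` headers take precedence:

const authedFetch = fetchWithAuth(
  'anon-key-placeholder',
  async () => 'session-jwt-placeholder' // falls back to the key when this returns null
)

// Injects apikey + Authorization automatically:
await authedFetch('https://your-project.supabase.co/rest/v1/tickets')

// An explicit Authorization header is left untouched:
await authedFetch('https://your-project.supabase.co/rest/v1/tickets', {
  headers: { Authorization: 'Bearer custom-token' },
})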
68
node_modules/@supabase/supabase-js/src/lib/helpers.ts generated vendored Normal file

@ -0,0 +1,68 @@
// helpers.ts
import { SupabaseClientOptions } from './types'
export function uuid() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
var r = (Math.random() * 16) | 0,
v = c == 'x' ? r : (r & 0x3) | 0x8
return v.toString(16)
})
}
export function stripTrailingSlash(url: string): string {
return url.replace(/\/$/, '')
}
export const isBrowser = () => typeof window !== 'undefined'
export function applySettingDefaults<
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database
>(
options: SupabaseClientOptions<SchemaName>,
defaults: SupabaseClientOptions<any>
): Required<SupabaseClientOptions<SchemaName>> {
const {
db: dbOptions,
auth: authOptions,
realtime: realtimeOptions,
global: globalOptions,
} = options
const {
db: DEFAULT_DB_OPTIONS,
auth: DEFAULT_AUTH_OPTIONS,
realtime: DEFAULT_REALTIME_OPTIONS,
global: DEFAULT_GLOBAL_OPTIONS,
} = defaults
const result: Required<SupabaseClientOptions<SchemaName>> = {
db: {
...DEFAULT_DB_OPTIONS,
...dbOptions,
},
auth: {
...DEFAULT_AUTH_OPTIONS,
...authOptions,
},
realtime: {
...DEFAULT_REALTIME_OPTIONS,
...realtimeOptions,
},
global: {
...DEFAULT_GLOBAL_OPTIONS,
...globalOptions,
},
accessToken: async () => '',
}
if (options.accessToken) {
result.accessToken = options.accessToken
} else {
// hack around Required<>
delete (result as any).accessToken
}
return result
}

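A sketch of the merge `applySettingDefaults` performs, pairing it with the DEFAULT_* objects from constants.ts; per-section user options (illustrative here) override defaults field by field:

const merged = applySettingDefaults(
  { db: { schema: 'public' }, global: { headers: { 'x-trace-id': 'abc' } } },
  {
    db: DEFAULT_DB_OPTIONS,
    auth: DEFAULT_AUTH_OPTIONS,
    realtime: DEFAULT_REALTIME_OPTIONS,
    global: DEFAULT_GLOBAL_OPTIONS,
  }
)
// merged.global.headers now holds both X-Client-Info and x-trace-id;
// merged.auth keeps all defaults since no auth options were passed.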
123
node_modules/@supabase/supabase-js/src/lib/types.ts generated vendored Normal file

@ -0,0 +1,123 @@
import { AuthClient } from '@supabase/auth-js'
import { RealtimeClientOptions } from '@supabase/realtime-js'
import { PostgrestError } from '@supabase/postgrest-js'
type AuthClientOptions = ConstructorParameters<typeof AuthClient>[0]
export interface SupabaseAuthClientOptions extends AuthClientOptions {}
export type Fetch = typeof fetch
export type SupabaseClientOptions<SchemaName> = {
/**
* The Postgres schema which your tables belong to. Must be on the list of exposed schemas in Supabase. Defaults to `public`.
*/
db?: {
schema?: SchemaName
}
auth?: {
/**
* Automatically refreshes the token for logged-in users. Defaults to true.
*/
autoRefreshToken?: boolean
/**
* Optional key name used for storing tokens in local storage.
*/
storageKey?: string
/**
* Whether to persist a logged-in session to storage. Defaults to true.
*/
persistSession?: boolean
/**
* Detect a session from the URL. Used for OAuth login callbacks. Defaults to true.
*/
detectSessionInUrl?: boolean
/**
* A storage provider. Used to store the logged-in session.
*/
storage?: SupabaseAuthClientOptions['storage']
/**
* OAuth flow to use - defaults to implicit flow. PKCE is recommended for mobile and server-side applications.
*/
flowType?: SupabaseAuthClientOptions['flowType']
/**
* If debug messages for authentication client are emitted. Can be used to inspect the behavior of the library.
*/
debug?: SupabaseAuthClientOptions['debug']
/**
* Provide your own locking mechanism based on the environment. By default no locking is done at this time.
*
* @experimental
*/
lock?: SupabaseAuthClientOptions['lock']
}
/**
* Options passed to the realtime-js instance
*/
realtime?: RealtimeClientOptions
global?: {
/**
* A custom `fetch` implementation.
*/
fetch?: Fetch
/**
* Optional headers for initializing the client.
*/
headers?: Record<string, string>
}
/**
* Optional function for using a third-party authentication system with
* Supabase. The function should return an access token or ID token (JWT) by
* obtaining it from the third-party auth client library. Note that this
* function may be called concurrently and many times. Use memoization and
* locking techniques if this is not supported by the client libraries.
*
* When set, the `auth` namespace of the Supabase client cannot be used.
* Create another client if you wish to use Supabase Auth and third-party
* authentications concurrently in the same application.
*/
accessToken?: () => Promise<string | null>
}
export type GenericRelationship = {
foreignKeyName: string
columns: string[]
isOneToOne?: boolean
referencedRelation: string
referencedColumns: string[]
}
export type GenericTable = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericUpdatableView = GenericTable
export type GenericNonUpdatableView = {
Row: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericView = GenericUpdatableView | GenericNonUpdatableView
export type GenericFunction = {
Args: Record<string, unknown>
Returns: unknown
}
export type GenericSchema = {
Tables: Record<string, GenericTable>
Views: Record<string, GenericView>
Functions: Record<string, GenericFunction>
}
/**
* Helper types for query results.
*/
export type QueryResult<T> = T extends PromiseLike<infer U> ? U : never
export type QueryData<T> = T extends PromiseLike<{ data: infer U }> ? Exclude<U, null> : never
export type QueryError = PostgrestError

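A sketch of the `QueryData` helper in use, assuming an existing `supabase` client and a hypothetical `boletos` table:

const boletosQuery = supabase.from('boletos').select('id, precio')
type Boletos = QueryData<typeof boletosQuery> // the resolved row array type

const { data, error } = await boletosQuery
if (error) throw error
const boletos: Boletos = data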
1
node_modules/@supabase/supabase-js/src/lib/version.ts generated vendored Normal file

@ -0,0 +1 @@
export const version = '2.49.1'

21
node_modules/@types/node/LICENSE generated vendored Normal file

@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

15
node_modules/@types/node/README.md generated vendored Normal file

@ -0,0 +1,15 @@
# Installation
> `npm install --save @types/node`
# Summary
This package contains type definitions for node (https://nodejs.org/).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
### Additional Details
* Last updated: Sat, 01 Mar 2025 06:37:25 GMT
* Dependencies: [undici-types](https://npmjs.com/package/undici-types)
# Credits
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky).

1095
node_modules/@types/node/assert.d.ts generated vendored Normal file

File diff suppressed because it is too large

8
node_modules/@types/node/assert/strict.d.ts generated vendored Normal file

@ -0,0 +1,8 @@
declare module "assert/strict" {
import { strict } from "node:assert";
export = strict;
}
declare module "node:assert/strict" {
import { strict } from "node:assert";
export = strict;
}

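With these declarations, both specifiers resolve to the strict-assertion API; a minimal sketch:

import assert from 'node:assert/strict'

assert.equal(1, 1)                        // passes: strict mode compares with ===
// assert.deepEqual({ a: 1 }, { a: '1' }) // would throw: deepEqual follows deepStrictEqual semantics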
541
node_modules/@types/node/async_hooks.d.ts generated vendored Normal file

@ -0,0 +1,541 @@
/**
* We strongly discourage the use of the `async_hooks` API.
* Other APIs that can cover most of its use cases include:
*
* * [`AsyncLocalStorage`](https://nodejs.org/docs/latest-v22.x/api/async_context.html#class-asynclocalstorage) tracks async context
* * [`process.getActiveResourcesInfo()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processgetactiveresourcesinfo) tracks active resources
*
* The `node:async_hooks` module provides an API to track asynchronous resources.
* It can be accessed using:
*
* ```js
* import async_hooks from 'node:async_hooks';
* ```
* @experimental
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/async_hooks.js)
*/
declare module "async_hooks" {
/**
* ```js
* import { executionAsyncId } from 'node:async_hooks';
* import fs from 'node:fs';
*
* console.log(executionAsyncId()); // 1 - bootstrap
* const path = '.';
* fs.open(path, 'r', (err, fd) => {
* console.log(executionAsyncId()); // 6 - open()
* });
* ```
*
* The ID returned from `executionAsyncId()` is related to execution timing, not
* causality (which is covered by `triggerAsyncId()`):
*
* ```js
* const server = net.createServer((conn) => {
* // Returns the ID of the server, not of the new connection, because the
* // callback runs in the execution scope of the server's MakeCallback().
* async_hooks.executionAsyncId();
*
* }).listen(port, () => {
* // Returns the ID of a TickObject (process.nextTick()) because all
* // callbacks passed to .listen() are wrapped in a nextTick().
* async_hooks.executionAsyncId();
* });
* ```
*
* Promise contexts may not get precise `executionAsyncIds` by default.
* See the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking).
* @since v8.1.0
* @return The `asyncId` of the current execution context. Useful to track when something calls.
*/
function executionAsyncId(): number;
/**
* Resource objects returned by `executionAsyncResource()` are most often internal
* Node.js handle objects with undocumented APIs. Using any functions or properties
* on the object is likely to crash your application and should be avoided.
*
* Using `executionAsyncResource()` in the top-level execution context will
* return an empty object as there is no handle or request object to use,
* but having an object representing the top-level can be helpful.
*
* ```js
* import { open } from 'node:fs';
* import { executionAsyncId, executionAsyncResource } from 'node:async_hooks';
*
* console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
* open(new URL(import.meta.url), 'r', (err, fd) => {
* console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
* });
* ```
*
* This can be used to implement continuation local storage without the
* use of a tracking `Map` to store the metadata:
*
* ```js
* import { createServer } from 'node:http';
* import {
* executionAsyncId,
* executionAsyncResource,
* createHook,
* } from 'node:async_hooks';
* const sym = Symbol('state'); // Private symbol to avoid pollution
*
* createHook({
* init(asyncId, type, triggerAsyncId, resource) {
* const cr = executionAsyncResource();
* if (cr) {
* resource[sym] = cr[sym];
* }
* },
* }).enable();
*
* const server = createServer((req, res) => {
* executionAsyncResource()[sym] = { state: req.url };
* setTimeout(function() {
* res.end(JSON.stringify(executionAsyncResource()[sym]));
* }, 100);
* }).listen(3000);
* ```
* @since v13.9.0, v12.17.0
* @return The resource representing the current execution. Useful to store data within the resource.
*/
function executionAsyncResource(): object;
/**
* ```js
* const server = net.createServer((conn) => {
* // The resource that caused (or triggered) this callback to be called
* // was that of the new connection. Thus the return value of triggerAsyncId()
* // is the asyncId of "conn".
* async_hooks.triggerAsyncId();
*
* }).listen(port, () => {
* // Even though all callbacks passed to .listen() are wrapped in a nextTick()
* // the callback itself exists because the call to the server's .listen()
* // was made. So the return value would be the ID of the server.
* async_hooks.triggerAsyncId();
* });
* ```
*
* Promise contexts may not get valid `triggerAsyncId`s by default. See
* the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking).
* @return The ID of the resource responsible for calling the callback that is currently being executed.
*/
function triggerAsyncId(): number;
interface HookCallbacks {
/**
* Called when a class is constructed that has the possibility to emit an asynchronous event.
* @param asyncId A unique ID for the async resource
* @param type The type of the async resource
* @param triggerAsyncId The unique ID of the async resource in whose execution context this async resource was created
* @param resource Reference to the resource representing the async operation, needs to be released during destroy
*/
init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
/**
* When an asynchronous operation is initiated or completes a callback is called to notify the user.
* The before callback is called just before said callback is executed.
* @param asyncId the unique identifier assigned to the resource about to execute the callback.
*/
before?(asyncId: number): void;
/**
* Called immediately after the callback specified in `before` is completed.
*
* If an uncaught exception occurs during execution of the callback, then `after` will run after the `'uncaughtException'` event is emitted or a `domain`'s handler runs.
* @param asyncId the unique identifier assigned to the resource which has executed the callback.
*/
after?(asyncId: number): void;
/**
* Called when a promise has resolve() called. This may not be in the same execution id
* as the promise itself.
* @param asyncId the unique id for the promise that was resolve()d.
*/
promiseResolve?(asyncId: number): void;
/**
* Called after the resource corresponding to asyncId is destroyed
* @param asyncId a unique ID for the async resource
*/
destroy?(asyncId: number): void;
}
interface AsyncHook {
/**
* Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop.
*/
enable(): this;
/**
* Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled.
*/
disable(): this;
}
/**
* Registers functions to be called for different lifetime events of each async
* operation.
*
* The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
* respective asynchronous event during a resource's lifetime.
*
* All callbacks are optional. For example, if only resource cleanup needs to
* be tracked, then only the `destroy` callback needs to be passed. The
* specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section.
*
* ```js
* import { createHook } from 'node:async_hooks';
*
* const asyncHook = createHook({
* init(asyncId, type, triggerAsyncId, resource) { },
* destroy(asyncId) { },
* });
* ```
*
* The callbacks will be inherited via the prototype chain:
*
* ```js
* class MyAsyncCallbacks {
* init(asyncId, type, triggerAsyncId, resource) { }
* destroy(asyncId) {}
* }
*
* class MyAddedCallbacks extends MyAsyncCallbacks {
* before(asyncId) { }
* after(asyncId) { }
* }
*
* const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
* ```
*
* Because promises are asynchronous resources whose lifecycle is tracked
* via the async hooks mechanism, the `init()`, `before()`, `after()`, and `destroy()` callbacks _must not_ be async functions that return promises.
* @since v8.1.0
* @param callbacks The `Hook Callbacks` to register
* @return Instance used for disabling and enabling hooks
*/
function createHook(callbacks: HookCallbacks): AsyncHook;
interface AsyncResourceOptions {
/**
* The ID of the execution context that created this async event.
* @default executionAsyncId()
*/
triggerAsyncId?: number | undefined;
/**
* Disables automatic `emitDestroy` when the object is garbage collected.
* This usually does not need to be set (even if `emitDestroy` is called
* manually), unless the resource's `asyncId` is retrieved and the
* sensitive API's `emitDestroy` is called with it.
* @default false
*/
requireManualDestroy?: boolean | undefined;
}
/**
* The class `AsyncResource` is designed to be extended by the embedder's async
* resources. Using this, users can easily trigger the lifetime events of their
* own resources.
*
* The `init` hook will trigger when an `AsyncResource` is instantiated.
*
* The following is an overview of the `AsyncResource` API.
*
* ```js
* import { AsyncResource, executionAsyncId } from 'node:async_hooks';
*
* // AsyncResource() is meant to be extended. Instantiating a
* // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
* // async_hook.executionAsyncId() is used.
* const asyncResource = new AsyncResource(
* type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false },
* );
*
* // Run a function in the execution context of the resource. This will
* // * establish the context of the resource
* // * trigger the AsyncHooks before callbacks
* // * call the provided function `fn` with the supplied arguments
* // * trigger the AsyncHooks after callbacks
* // * restore the original execution context
* asyncResource.runInAsyncScope(fn, thisArg, ...args);
*
* // Call AsyncHooks destroy callbacks.
* asyncResource.emitDestroy();
*
* // Return the unique ID assigned to the AsyncResource instance.
* asyncResource.asyncId();
*
* // Return the trigger ID for the AsyncResource instance.
* asyncResource.triggerAsyncId();
* ```
*/
class AsyncResource {
/**
* AsyncResource() is meant to be extended. Instantiating a
* new AsyncResource() also triggers init. If triggerAsyncId is omitted then
* async_hook.executionAsyncId() is used.
* @param type The type of async event.
* @param triggerAsyncId The ID of the execution context that created
* this async event (default: `executionAsyncId()`), or an
* AsyncResourceOptions object (since v9.3.0)
*/
constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
/**
* Binds the given function to the current execution context.
* @since v14.8.0, v12.19.0
* @param fn The function to bind to the current execution context.
* @param type An optional name to associate with the underlying `AsyncResource`.
*/
static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
fn: Func,
type?: string,
thisArg?: ThisArg,
): Func;
/**
* Binds the given function to execute to this `AsyncResource`'s scope.
* @since v14.8.0, v12.19.0
* @param fn The function to bind to the current `AsyncResource`.
*/
bind<Func extends (...args: any[]) => any>(fn: Func): Func;
/**
* Call the provided function with the provided arguments in the execution context
* of the async resource. This will establish the context, trigger the AsyncHooks
* before callbacks, call the function, trigger the AsyncHooks after callbacks, and
* then restore the original execution context.
* @since v9.6.0
* @param fn The function to call in the execution context of this async resource.
* @param thisArg The receiver to be used for the function call.
* @param args Optional arguments to pass to the function.
*/
runInAsyncScope<This, Result>(
fn: (this: This, ...args: any[]) => Result,
thisArg?: This,
...args: any[]
): Result;
/**
* Call all `destroy` hooks. This should only ever be called once. An error will
* be thrown if it is called more than once. This **must** be manually called. If
* the resource is left to be collected by the GC then the `destroy` hooks will
* never be called.
* @return A reference to `asyncResource`.
*/
emitDestroy(): this;
/**
* @return The unique `asyncId` assigned to the resource.
*/
asyncId(): number;
/**
* @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
*/
triggerAsyncId(): number;
}
/**
* This class creates stores that stay coherent through asynchronous operations.
*
* While you can create your own implementation on top of the `node:async_hooks` module, `AsyncLocalStorage` should be preferred as it is a performant and memory
* safe implementation that involves significant optimizations that are non-obvious
* to implement.
*
* The following example uses `AsyncLocalStorage` to build a simple logger
* that assigns IDs to incoming HTTP requests and includes them in messages
* logged within each request.
*
* ```js
* import http from 'node:http';
* import { AsyncLocalStorage } from 'node:async_hooks';
*
* const asyncLocalStorage = new AsyncLocalStorage();
*
* function logWithId(msg) {
* const id = asyncLocalStorage.getStore();
* console.log(`${id !== undefined ? id : '-'}:`, msg);
* }
*
* let idSeq = 0;
* http.createServer((req, res) => {
* asyncLocalStorage.run(idSeq++, () => {
* logWithId('start');
* // Imagine any chain of async operations here
* setImmediate(() => {
* logWithId('finish');
* res.end();
* });
* });
* }).listen(8080);
*
* http.get('http://localhost:8080');
* http.get('http://localhost:8080');
* // Prints:
* // 0: start
* // 1: start
* // 0: finish
* // 1: finish
* ```
*
* Each instance of `AsyncLocalStorage` maintains an independent storage context.
* Multiple instances can safely exist simultaneously without risk of interfering
* with each other's data.
* @since v13.10.0, v12.17.0
*/
class AsyncLocalStorage<T> {
/**
* Binds the given function to the current execution context.
* @since v19.8.0
* @experimental
* @param fn The function to bind to the current execution context.
* @return A new function that calls `fn` within the captured execution context.
*/
static bind<Func extends (...args: any[]) => any>(fn: Func): Func;
/**
* Captures the current execution context and returns a function that accepts a
* function as an argument. Whenever the returned function is called, it
* calls the function passed to it within the captured context.
*
* ```js
* const asyncLocalStorage = new AsyncLocalStorage();
* const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot());
* const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore()));
* console.log(result); // returns 123
* ```
*
* AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple
* async context tracking purposes, for example:
*
* ```js
* class Foo {
* #runInAsyncScope = AsyncLocalStorage.snapshot();
*
* get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); }
* }
*
* const foo = asyncLocalStorage.run(123, () => new Foo());
* console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123
* ```
* @since v19.8.0
* @experimental
* @return A new function with the signature `(fn: (...args) : R, ...args) : R`.
*/
static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
/**
* Disables the instance of `AsyncLocalStorage`. All subsequent calls
* to `asyncLocalStorage.getStore()` will return `undefined` until `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
*
* When calling `asyncLocalStorage.disable()`, all current contexts linked to the
* instance will be exited.
*
* Calling `asyncLocalStorage.disable()` is required before the `asyncLocalStorage` can be garbage collected. This does not apply to stores
* provided by the `asyncLocalStorage`, as those objects are garbage collected
* along with the corresponding async resources.
*
* Use this method when the `asyncLocalStorage` is not in use anymore
* in the current process.
* @since v13.10.0, v12.17.0
* @experimental
*/
disable(): void;
/**
* Returns the current store.
* If called outside of an asynchronous context initialized by
* calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
* returns `undefined`.
* @since v13.10.0, v12.17.0
*/
getStore(): T | undefined;
/**
* Runs a function synchronously within a context and returns its
* return value. The store is not accessible outside of the callback function.
* The store is accessible to any asynchronous operations created within the
* callback.
*
* The optional `args` are passed to the callback function.
*
* If the callback function throws an error, the error is thrown by `run()` too.
* The stacktrace is not impacted by this call and the context is exited.
*
* Example:
*
* ```js
* const store = { id: 2 };
* try {
* asyncLocalStorage.run(store, () => {
* asyncLocalStorage.getStore(); // Returns the store object
* setTimeout(() => {
* asyncLocalStorage.getStore(); // Returns the store object
* }, 200);
* throw new Error();
* });
* } catch (e) {
* asyncLocalStorage.getStore(); // Returns undefined
* // The error will be caught here
* }
* ```
* @since v13.10.0, v12.17.0
*/
run<R>(store: T, callback: () => R): R;
run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
/**
* Runs a function synchronously outside of a context and returns its
* return value. The store is not accessible within the callback function or
* the asynchronous operations created within the callback. Any `getStore()` call done within the callback function will always return `undefined`.
*
* The optional `args` are passed to the callback function.
*
* If the callback function throws an error, the error is thrown by `exit()` too.
* The stacktrace is not impacted by this call and the context is re-entered.
*
* Example:
*
* ```js
* // Within a call to run
* try {
* asyncLocalStorage.getStore(); // Returns the store object or value
* asyncLocalStorage.exit(() => {
* asyncLocalStorage.getStore(); // Returns undefined
* throw new Error();
* });
* } catch (e) {
* asyncLocalStorage.getStore(); // Returns the same object or value
* // The error will be caught here
* }
* ```
* @since v13.10.0, v12.17.0
* @experimental
*/
exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
/**
* Transitions into the context for the remainder of the current
* synchronous execution and then persists the store through any following
* asynchronous calls.
*
* Example:
*
* ```js
* const store = { id: 1 };
* // Replaces previous store with the given store object
* asyncLocalStorage.enterWith(store);
* asyncLocalStorage.getStore(); // Returns the store object
* someAsyncOperation(() => {
* asyncLocalStorage.getStore(); // Returns the same object
* });
* ```
*
* This transition will continue for the _entire_ synchronous execution.
* This means that if, for example, the context is entered within an event
* handler subsequent event handlers will also run within that context unless
* specifically bound to another context with an `AsyncResource`. That is why `run()` should be preferred over `enterWith()` unless there are strong reasons
* to use the latter method.
*
* ```js
* const store = { id: 1 };
*
* emitter.on('my-event', () => {
* asyncLocalStorage.enterWith(store);
* });
* emitter.on('my-event', () => {
* asyncLocalStorage.getStore(); // Returns the same object
* });
*
* asyncLocalStorage.getStore(); // Returns undefined
* emitter.emit('my-event');
* asyncLocalStorage.getStore(); // Returns the same object
* ```
* @since v13.11.0, v12.17.0
* @experimental
*/
enterWith(store: T): void;
}
}
declare module "node:async_hooks" {
export * from "async_hooks";
}

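A small sketch of the static `bind` declared above, complementing the `snapshot()` example: the returned function re-enters the context that was current when `bind` was called:

import { AsyncLocalStorage } from 'node:async_hooks'

const als = new AsyncLocalStorage<string>()
let bound!: () => void

als.run('request-42', () => {
  // Capture the current execution context along with the callback.
  bound = AsyncLocalStorage.bind(() => console.log(als.getStore()))
})

bound() // prints 'request-42' even though we are outside run()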
460
node_modules/@types/node/buffer.buffer.d.ts generated vendored Normal file

@ -0,0 +1,460 @@
declare module "buffer" {
type ImplicitArrayBuffer<T extends WithImplicitCoercion<ArrayBufferLike>> = T extends
{ valueOf(): infer V extends ArrayBufferLike } ? V : T;
global {
interface BufferConstructor {
// see buffer.d.ts for implementation shared with all TypeScript versions
/**
* Allocates a new buffer containing the given {str}.
*
* @param str String to store in buffer.
* @param encoding encoding to use, optional. Default is 'utf8'
* @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead.
*/
new(str: string, encoding?: BufferEncoding): Buffer<ArrayBuffer>;
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
* @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`).
*/
new(size: number): Buffer<ArrayBuffer>;
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
* @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
*/
new(array: ArrayLike<number>): Buffer<ArrayBuffer>;
/**
* Produces a Buffer backed by the same allocated memory as
* the given {ArrayBuffer}/{SharedArrayBuffer}.
*
* @param arrayBuffer The ArrayBuffer with which to share memory.
* @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
*/
new<TArrayBuffer extends ArrayBufferLike = ArrayBuffer>(arrayBuffer: TArrayBuffer): Buffer<TArrayBuffer>;
/**
* Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
* Array entries outside that range will be truncated to fit into it.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
* const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
* ```
*
* If `array` is an `Array`-like object (that is, one with a `length` property of
* type `number`), it is treated as if it is an array, unless it is a `Buffer` or
* a `Uint8Array`. This means all other `TypedArray` variants get treated as an
* `Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use
* `Buffer.copyBytesFrom()`.
*
* A `TypeError` will be thrown if `array` is not an `Array` or another type
* appropriate for `Buffer.from()` variants.
*
* `Buffer.from(array)` and `Buffer.from(string)` may also use the internal
* `Buffer` pool like `Buffer.allocUnsafe()` does.
* @since v5.10.0
*/
from(array: WithImplicitCoercion<ArrayLike<number>>): Buffer<ArrayBuffer>;
/**
* This creates a view of the `ArrayBuffer` without copying the underlying
* memory. For example, when passed a reference to the `.buffer` property of a
* `TypedArray` instance, the newly created `Buffer` will share the same
* allocated memory as the `TypedArray`'s underlying `ArrayBuffer`.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const arr = new Uint16Array(2);
*
* arr[0] = 5000;
* arr[1] = 4000;
*
* // Shares memory with `arr`.
* const buf = Buffer.from(arr.buffer);
*
* console.log(buf);
* // Prints: <Buffer 88 13 a0 0f>
*
* // Changing the original Uint16Array changes the Buffer also.
* arr[1] = 6000;
*
* console.log(buf);
* // Prints: <Buffer 88 13 70 17>
* ```
*
* The optional `byteOffset` and `length` arguments specify a memory range within
* the `arrayBuffer` that will be shared by the `Buffer`.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const ab = new ArrayBuffer(10);
* const buf = Buffer.from(ab, 0, 2);
*
* console.log(buf.length);
* // Prints: 2
* ```
*
* A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer` or a
* `SharedArrayBuffer` or another type appropriate for `Buffer.from()`
* variants.
*
* It is important to remember that a backing `ArrayBuffer` can cover a range
* of memory that extends beyond the bounds of a `TypedArray` view. A new
* `Buffer` created using the `buffer` property of a `TypedArray` may extend
* beyond the range of the `TypedArray`:
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const arrA = Uint8Array.from([0x63, 0x64, 0x65, 0x66]); // 4 elements
* const arrB = new Uint8Array(arrA.buffer, 1, 2); // 2 elements
* console.log(arrA.buffer === arrB.buffer); // true
*
* const buf = Buffer.from(arrB.buffer);
* console.log(buf);
* // Prints: <Buffer 63 64 65 66>
* ```
* @since v5.10.0
* @param arrayBuffer An `ArrayBuffer`, `SharedArrayBuffer`, for example the
* `.buffer` property of a `TypedArray`.
* @param byteOffset Index of first byte to expose. **Default:** `0`.
* @param length Number of bytes to expose. **Default:**
* `arrayBuffer.byteLength - byteOffset`.
*/
from<TArrayBuffer extends WithImplicitCoercion<ArrayBufferLike>>(
arrayBuffer: TArrayBuffer,
byteOffset?: number,
length?: number,
): Buffer<ImplicitArrayBuffer<TArrayBuffer>>;
/**
* Creates a new `Buffer` containing `string`. The `encoding` parameter identifies
* the character encoding to be used when converting `string` into bytes.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf1 = Buffer.from('this is a tést');
* const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
*
* console.log(buf1.toString());
* // Prints: this is a tést
* console.log(buf2.toString());
* // Prints: this is a tést
* console.log(buf1.toString('latin1'));
* // Prints: this is a tést
* ```
*
* A `TypeError` will be thrown if `string` is not a string or another type
* appropriate for `Buffer.from()` variants.
*
* `Buffer.from(string)` may also use the internal `Buffer` pool like
* `Buffer.allocUnsafe()` does.
* @since v5.10.0
* @param string A string to encode.
* @param encoding The encoding of `string`. **Default:** `'utf8'`.
*/
from(string: WithImplicitCoercion<string>, encoding?: BufferEncoding): Buffer<ArrayBuffer>;
/**
* Creates a new Buffer using the passed {data}
* @param values to create a new Buffer
*/
of(...items: number[]): Buffer<ArrayBuffer>;
/**
* Returns a new `Buffer` which is the result of concatenating all the `Buffer` instances in the `list` together.
*
* If the list has no items, or if the `totalLength` is 0, then a new zero-length `Buffer` is returned.
*
* If `totalLength` is not provided, it is calculated from the `Buffer` instances
* in `list` by adding their lengths.
*
* If `totalLength` is provided, it is coerced to an unsigned integer. If the
* combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
* truncated to `totalLength`. If the combined length of the `Buffer`s in `list` is
* less than `totalLength`, the remaining space is filled with zeros.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* // Create a single `Buffer` from a list of three `Buffer` instances.
*
* const buf1 = Buffer.alloc(10);
* const buf2 = Buffer.alloc(14);
* const buf3 = Buffer.alloc(18);
* const totalLength = buf1.length + buf2.length + buf3.length;
*
* console.log(totalLength);
* // Prints: 42
*
* const bufA = Buffer.concat([buf1, buf2, buf3], totalLength);
*
* console.log(bufA);
* // Prints: <Buffer 00 00 00 00 ...>
* console.log(bufA.length);
* // Prints: 42
* ```
*
* `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
* @since v0.7.11
* @param list List of `Buffer` or {@link Uint8Array} instances to concatenate.
* @param totalLength Total length of the `Buffer` instances in `list` when concatenated.
*/
concat(list: readonly Uint8Array[], totalLength?: number): Buffer<ArrayBuffer>;
/**
* Copies the underlying memory of `view` into a new `Buffer`.
*
* ```js
* const u16 = new Uint16Array([0, 0xffff]);
* const buf = Buffer.copyBytesFrom(u16, 1, 1);
* u16[1] = 0;
* console.log(buf.length); // 2
* console.log(buf[0]); // 255
* console.log(buf[1]); // 255
* ```
* @since v19.8.0
* @param view The {TypedArray} to copy.
* @param [offset=0] The starting offset within `view`.
* @param [length=view.length - offset] The number of elements from `view` to copy.
*/
copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer<ArrayBuffer>;
/**
* Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the `Buffer` will be zero-filled.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.alloc(5);
*
* console.log(buf);
* // Prints: <Buffer 00 00 00 00 00>
* ```
*
* If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
*
* If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.alloc(5, 'a');
*
* console.log(buf);
* // Prints: <Buffer 61 61 61 61 61>
* ```
*
* If both `fill` and `encoding` are specified, the allocated `Buffer` will be
* initialized by calling `buf.fill(fill, encoding)`.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
*
* console.log(buf);
* // Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
* ```
*
* Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance
* contents will never contain sensitive data from previous allocations, including
* data that might not have been allocated for `Buffer`s.
*
* A `TypeError` will be thrown if `size` is not a number.
* @since v5.10.0
* @param size The desired length of the new `Buffer`.
* @param [fill=0] A value to pre-fill the new `Buffer` with.
* @param [encoding='utf8'] If `fill` is a string, this is its encoding.
*/
alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer<ArrayBuffer>;
/**
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
*
* The underlying memory for `Buffer` instances created in this way is _not_
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize `Buffer` instances with zeroes.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.allocUnsafe(10);
*
* console.log(buf);
* // Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
*
* buf.fill(0);
*
* console.log(buf);
* // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
* ```
*
* A `TypeError` will be thrown if `size` is not a number.
*
* The `Buffer` module pre-allocates an internal `Buffer` instance of
* size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`,
* and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two).
*
* Use of this pre-allocated internal memory pool is a key difference between
* calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
* Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer` pool, while `Buffer.allocUnsafe(size).fill(fill)` _will_ use the internal `Buffer` pool if `size` is less
* than or equal to half `Buffer.poolSize`. The
* difference is subtle but can be important when an application requires the
* additional performance that `Buffer.allocUnsafe()` provides.
* @since v5.10.0
* @param size The desired length of the new `Buffer`.
*/
allocUnsafe(size: number): Buffer<ArrayBuffer>;
/**
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if
* `size` is 0.
*
* The underlying memory for `Buffer` instances created in this way is _not_
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize
* such `Buffer` instances with zeroes.
*
* When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
* allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This
* allows applications to avoid the garbage collection overhead of creating many
* individually allocated `Buffer` instances. This approach improves both
* performance and memory usage by eliminating the need to track and clean up as
* many individual `ArrayBuffer` objects.
*
* However, in the case where a developer may need to retain a small chunk of
* memory from a pool for an indeterminate amount of time, it may be appropriate
* to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and
* then copying out the relevant bits.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* // Need to keep around a few small chunks of memory.
* const store = [];
*
* socket.on('readable', () => {
* let data;
* while (null !== (data = readable.read())) {
* // Allocate for retained data.
* const sb = Buffer.allocUnsafeSlow(10);
*
* // Copy the data into the new allocation.
* data.copy(sb, 0, 0, 10);
*
* store.push(sb);
* }
* });
* ```
*
* A `TypeError` will be thrown if `size` is not a number.
* @since v5.12.0
* @param size The desired length of the new `Buffer`.
*/
allocUnsafeSlow(size: number): Buffer<ArrayBuffer>;
}
interface Buffer<TArrayBuffer extends ArrayBufferLike = ArrayBufferLike> extends Uint8Array<TArrayBuffer> {
// see buffer.d.ts for implementation shared with all TypeScript versions
/**
* Returns a new `Buffer` that references the same memory as the original, but
* offset and cropped by the `start` and `end` indices.
*
* This method is not compatible with the `Uint8Array.prototype.slice()`,
* which is a superclass of `Buffer`. To copy the slice, use `Uint8Array.prototype.slice()`.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.from('buffer');
*
* const copiedBuf = Uint8Array.prototype.slice.call(buf);
* copiedBuf[0]++;
* console.log(copiedBuf.toString());
* // Prints: cuffer
*
* console.log(buf.toString());
* // Prints: buffer
*
* // With buf.slice(), the original buffer is modified.
* const notReallyCopiedBuf = buf.slice();
* notReallyCopiedBuf[0]++;
* console.log(notReallyCopiedBuf.toString());
* // Prints: cuffer
* console.log(buf.toString());
* // Also prints: cuffer (!)
* ```
* @since v0.3.0
* @deprecated Use `subarray` instead.
* @param [start=0] Where the new `Buffer` will start.
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
*/
slice(start?: number, end?: number): Buffer<ArrayBuffer>;
/**
* Returns a new `Buffer` that references the same memory as the original, but
* offset and cropped by the `start` and `end` indices.
*
* Specifying `end` greater than `buf.length` will return the same result as
* that of `end` equal to `buf.length`.
*
* This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray).
*
* Modifying the new `Buffer` slice will modify the memory in the original `Buffer` because the allocated memory of the two objects overlaps.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte
* // from the original `Buffer`.
*
* const buf1 = Buffer.allocUnsafe(26);
*
* for (let i = 0; i < 26; i++) {
* // 97 is the decimal ASCII value for 'a'.
* buf1[i] = i + 97;
* }
*
* const buf2 = buf1.subarray(0, 3);
*
* console.log(buf2.toString('ascii', 0, buf2.length));
* // Prints: abc
*
* buf1[0] = 33;
*
* console.log(buf2.toString('ascii', 0, buf2.length));
* // Prints: !bc
* ```
*
* Specifying negative indexes causes the slice to be generated relative to the
* end of `buf` rather than the beginning.
*
* ```js
* import { Buffer } from 'node:buffer';
*
* const buf = Buffer.from('buffer');
*
* console.log(buf.subarray(-6, -1).toString());
* // Prints: buffe
* // (Equivalent to buf.subarray(0, 5).)
*
* console.log(buf.subarray(-6, -2).toString());
* // Prints: buff
* // (Equivalent to buf.subarray(0, 4).)
*
* console.log(buf.subarray(-5, -2).toString());
* // Prints: uff
* // (Equivalent to buf.subarray(1, 4).)
* ```
* @since v3.0.0
* @param [start=0] Where the new `Buffer` will start.
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
*/
subarray(start?: number, end?: number): Buffer<TArrayBuffer>;
}
}
/** @deprecated Use `Buffer.allocUnsafeSlow()` instead. */
var SlowBuffer: {
/** @deprecated Use `Buffer.allocUnsafeSlow()` instead. */
new(size: number): Buffer<ArrayBuffer>;
prototype: Buffer;
};
}

1926
node_modules/@types/node/buffer.d.ts generated vendored Normal file

File diff suppressed because it is too large

1549
node_modules/@types/node/child_process.d.ts generated vendored Normal file

File diff suppressed because it is too large

579
node_modules/@types/node/cluster.d.ts generated vendored Normal file

@ -0,0 +1,579 @@
/**
* Clusters of Node.js processes can be used to run multiple instances of Node.js
* that can distribute workloads among their application threads. When process isolation
* is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html)
* module instead, which allows running multiple application threads within a single Node.js instance.
*
* The cluster module allows easy creation of child processes that all share
* server ports.
*
* ```js
* import cluster from 'node:cluster';
* import http from 'node:http';
* import { availableParallelism } from 'node:os';
* import process from 'node:process';
*
* const numCPUs = availableParallelism();
*
* if (cluster.isPrimary) {
* console.log(`Primary ${process.pid} is running`);
*
* // Fork workers.
* for (let i = 0; i < numCPUs; i++) {
* cluster.fork();
* }
*
* cluster.on('exit', (worker, code, signal) => {
* console.log(`worker ${worker.process.pid} died`);
* });
* } else {
* // Workers can share any TCP connection
* // In this case it is an HTTP server
* http.createServer((req, res) => {
* res.writeHead(200);
* res.end('hello world\n');
* }).listen(8000);
*
* console.log(`Worker ${process.pid} started`);
* }
* ```
*
* Running Node.js will now share port 8000 between the workers:
*
* ```console
* $ node server.js
* Primary 3596 is running
* Worker 4324 started
* Worker 4520 started
* Worker 6056 started
* Worker 5644 started
* ```
*
* On Windows, it is not yet possible to set up a named pipe server in a worker.
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/cluster.js)
*/
declare module "cluster" {
import * as child from "node:child_process";
import EventEmitter = require("node:events");
import * as net from "node:net";
type SerializationType = "json" | "advanced";
export interface ClusterSettings {
/**
* List of string arguments passed to the Node.js executable.
* @default process.execArgv
*/
execArgv?: string[] | undefined;
/**
* File path to worker file.
* @default process.argv[1]
*/
exec?: string | undefined;
/**
* String arguments passed to worker.
* @default process.argv.slice(2)
*/
args?: string[] | undefined;
/**
* Whether or not to send output to parent's stdio.
* @default false
*/
silent?: boolean | undefined;
/**
* Configures the stdio of forked processes. Because the cluster module relies on IPC to function, this configuration must
* contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_process.spawn()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processspawncommand-args-options)'s
* [`stdio`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#optionsstdio).
*/
stdio?: any[] | undefined;
/**
* Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).)
*/
uid?: number | undefined;
/**
* Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).)
*/
gid?: number | undefined;
/**
* Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number.
* By default each worker gets its own port, incremented from the primary's `process.debugPort`.
*/
inspectPort?: number | (() => number) | undefined;
/**
* Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`.
* See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#advanced-serialization) for more details.
* @default false
*/
serialization?: SerializationType | undefined;
/**
* Current working directory of the worker process.
* @default undefined (inherits from parent process)
*/
cwd?: string | undefined;
/**
* Hide the forked processes console window that would normally be created on Windows systems.
* @default false
*/
windowsHide?: boolean | undefined;
}
export interface Address {
address: string;
port: number;
/**
* The `addressType` is one of:
*
* * `4` (TCPv4)
* * `6` (TCPv6)
* * `-1` (Unix domain socket)
* * `'udp4'` or `'udp6'` (UDPv4 or UDPv6)
*/
addressType: 4 | 6 | -1 | "udp4" | "udp6";
}
/**
* A `Worker` object contains all public information and method about a worker.
* In the primary it can be obtained using `cluster.workers`. In a worker
* it can be obtained using `cluster.worker`.
* @since v0.7.0
*/
export class Worker extends EventEmitter {
/**
* Each new worker is given its own unique id; this id is stored in the `id` property.
*
* While a worker is alive, this is the key that indexes it in `cluster.workers`.
* @since v0.8.0
*/
id: number;
/**
* All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object
* from this function is stored as `.process`. In a worker, the global `process` is stored.
*
* See: [Child Process module](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options).
*
* Workers will call `process.exit(0)` if the `'disconnect'` event occurs
* on `process` and `.exitedAfterDisconnect` is not `true`. This protects against
* accidental disconnection.
* @since v0.7.0
*/
process: child.ChildProcess;
/**
* Send a message to a worker or primary, optionally with a handle.
*
* In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback).
*
* In a worker, this sends a message to the primary. It is identical to `process.send()`.
*
* This example will echo back all messages from the primary:
*
* ```js
* if (cluster.isPrimary) {
* const worker = cluster.fork();
* worker.send('hi there');
*
* } else if (cluster.isWorker) {
* process.on('message', (msg) => {
* process.send(msg);
* });
* }
* ```
* @since v0.7.0
* @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles.
*/
send(message: child.Serializable, callback?: (error: Error | null) => void): boolean;
send(
message: child.Serializable,
sendHandle: child.SendHandle,
callback?: (error: Error | null) => void,
): boolean;
send(
message: child.Serializable,
sendHandle: child.SendHandle,
options?: child.MessageOptions,
callback?: (error: Error | null) => void,
): boolean;
/**
* This function will kill the worker. In the primary worker, it does this by
* disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`.
*
* The `kill()` function kills the worker process without waiting for a graceful
* disconnect, it has the same behavior as `worker.process.kill()`.
*
* This method is aliased as `worker.destroy()` for backwards compatibility.
*
* In a worker, `process.kill()` exists, but it is not this function;
* it is [`kill()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processkillpid-signal).
* @since v0.9.12
* @param [signal='SIGTERM'] Name of the kill signal to send to the worker process.
*/
kill(signal?: string): void;
destroy(signal?: string): void;
/**
* In a worker, this function will close all servers, wait for the `'close'` event
* on those servers, and then disconnect the IPC channel.
*
* In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself.
*
* Causes `.exitedAfterDisconnect` to be set.
*
* After a server is closed, it will no longer accept new connections,
* but connections may be accepted by any other listening worker. Existing
* connections will be allowed to close as usual. When no more connections exist,
* see `server.close()`, the IPC channel to the worker will close allowing it
* to die gracefully.
*
* The above applies _only_ to server connections, client connections are not
* automatically closed by workers, and disconnect does not wait for them to close
* before exiting.
*
* In a worker, `process.disconnect` exists, but it is not this function;
* it is `disconnect()`.
*
* Because long living server connections may block workers from disconnecting, it
* may be useful to send a message, so application specific actions may be taken to
* close them. It also may be useful to implement a timeout, killing a worker if
* the `'disconnect'` event has not been emitted after some time.
*
* ```js
* import net from 'node:net';
*
* if (cluster.isPrimary) {
* const worker = cluster.fork();
* let timeout;
*
* worker.on('listening', (address) => {
* worker.send('shutdown');
* worker.disconnect();
* timeout = setTimeout(() => {
* worker.kill();
* }, 2000);
* });
*
* worker.on('disconnect', () => {
* clearTimeout(timeout);
* });
*
* } else if (cluster.isWorker) {
* const server = net.createServer((socket) => {
* // Connections never end
* });
*
* server.listen(8000);
*
* process.on('message', (msg) => {
* if (msg === 'shutdown') {
* // Initiate graceful close of any connections to server
* }
* });
* }
* ```
* @since v0.7.7
* @return A reference to `worker`.
*/
disconnect(): void;
/**
* This function returns `true` if the worker is connected to its primary via its
* IPC channel, `false` otherwise. A worker is connected to its primary after it
* has been created. It is disconnected after the `'disconnect'` event is emitted.
* @since v0.11.14
*/
isConnected(): boolean;
/**
* This function returns `true` if the worker's process has terminated (either
* because of exiting or being signaled). Otherwise, it returns `false`.
*
* ```js
* import cluster from 'node:cluster';
* import http from 'node:http';
* import { availableParallelism } from 'node:os';
* import process from 'node:process';
*
* const numCPUs = availableParallelism();
*
* if (cluster.isPrimary) {
* console.log(`Primary ${process.pid} is running`);
*
* // Fork workers.
* for (let i = 0; i < numCPUs; i++) {
* cluster.fork();
* }
*
* cluster.on('fork', (worker) => {
* console.log('worker is dead:', worker.isDead());
* });
*
* cluster.on('exit', (worker, code, signal) => {
* console.log('worker is dead:', worker.isDead());
* });
* } else {
* // Workers can share any TCP connection. In this case, it is an HTTP server.
* http.createServer((req, res) => {
* res.writeHead(200);
* res.end(`Current process\n ${process.pid}`);
* process.kill(process.pid);
* }).listen(8000);
* }
* ```
* @since v0.11.14
*/
isDead(): boolean;
/**
* This property is `true` if the worker exited due to `.disconnect()`.
* If the worker exited any other way, it is `false`. If the
* worker has not exited, it is `undefined`.
*
* The boolean `worker.exitedAfterDisconnect` allows distinguishing between
* voluntary and accidental exit; the primary may choose not to respawn a worker
* based on this value.
*
* ```js
* cluster.on('exit', (worker, code, signal) => {
* if (worker.exitedAfterDisconnect === true) {
* console.log('Oh, it was just voluntary, no need to worry');
* }
* });
*
* // kill worker
* worker.kill();
* ```
* @since v6.0.0
*/
exitedAfterDisconnect: boolean;
/**
* events.EventEmitter
* 1. disconnect
* 2. error
* 3. exit
* 4. listening
* 5. message
* 6. online
*/
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: "disconnect", listener: () => void): this;
addListener(event: "error", listener: (error: Error) => void): this;
addListener(event: "exit", listener: (code: number, signal: string) => void): this;
addListener(event: "listening", listener: (address: Address) => void): this;
addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
addListener(event: "online", listener: () => void): this;
emit(event: string | symbol, ...args: any[]): boolean;
emit(event: "disconnect"): boolean;
emit(event: "error", error: Error): boolean;
emit(event: "exit", code: number, signal: string): boolean;
emit(event: "listening", address: Address): boolean;
emit(event: "message", message: any, handle: net.Socket | net.Server): boolean;
emit(event: "online"): boolean;
on(event: string, listener: (...args: any[]) => void): this;
on(event: "disconnect", listener: () => void): this;
on(event: "error", listener: (error: Error) => void): this;
on(event: "exit", listener: (code: number, signal: string) => void): this;
on(event: "listening", listener: (address: Address) => void): this;
on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
on(event: "online", listener: () => void): this;
once(event: string, listener: (...args: any[]) => void): this;
once(event: "disconnect", listener: () => void): this;
once(event: "error", listener: (error: Error) => void): this;
once(event: "exit", listener: (code: number, signal: string) => void): this;
once(event: "listening", listener: (address: Address) => void): this;
once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: "online", listener: () => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "disconnect", listener: () => void): this;
prependListener(event: "error", listener: (error: Error) => void): this;
prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
prependListener(event: "listening", listener: (address: Address) => void): this;
prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependListener(event: "online", listener: () => void): this;
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: "disconnect", listener: () => void): this;
prependOnceListener(event: "error", listener: (error: Error) => void): this;
prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
prependOnceListener(event: "listening", listener: (address: Address) => void): this;
prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependOnceListener(event: "online", listener: () => void): this;
}
export interface Cluster extends EventEmitter {
disconnect(callback?: () => void): void;
/**
* Spawn a new worker process.
*
* This can only be called from the primary process.
* @param env Key/value pairs to add to worker process environment.
* @since v0.6.0
*/
fork(env?: any): Worker;
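/*
 * A minimal sketch of passing extra environment to a worker via `fork(env)`
 * (the variable name WORKER_ROLE is an arbitrary choice for illustration,
 * not part of the upstream typings):
 *
 * ```js
 * import cluster from 'node:cluster';
 * import process from 'node:process';
 *
 * if (cluster.isPrimary) {
 *   cluster.fork({ WORKER_ROLE: 'jobs' }); // merged into the worker's process.env
 * } else {
 *   console.log(`role: ${process.env.WORKER_ROLE}`);
 * }
 * ```
 */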
/** @deprecated since v16.0.0 - use isPrimary. */
readonly isMaster: boolean;
/**
* True if the process is a primary. This is determined by `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID`
* is undefined, then `isPrimary` is `true`.
* @since v16.0.0
*/
readonly isPrimary: boolean;
/**
* True if the process is not a primary (it is the negation of `cluster.isPrimary`).
* @since v0.6.0
*/
readonly isWorker: boolean;
/**
* The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a
* global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings)
* is called, whichever comes first.
*
* `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute
* IOCP handles without incurring a large performance hit.
*
* `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`.
* @since v0.11.2
*/
schedulingPolicy: number;
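/*
 * A minimal sketch (illustrative, not upstream code): per the note above, the
 * policy must be assigned before the first worker is spawned to take effect.
 *
 * ```js
 * import cluster from 'node:cluster';
 *
 * cluster.schedulingPolicy = cluster.SCHED_NONE; // let the OS distribute connections
 *
 * if (cluster.isPrimary) {
 *   cluster.fork();
 * }
 * ```
 *
 * Equivalently, `NODE_CLUSTER_SCHED_POLICY=none node app.js` sets the policy
 * from the environment.
 */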
/**
* After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings)
* (or [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)) this settings object will contain
* the settings, including the default values.
*
* This object is not intended to be changed or set manually.
* @since v0.7.1
*/
readonly settings: ClusterSettings;
/** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings) instead. */
setupMaster(settings?: ClusterSettings): void;
/**
* `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`.
*
* Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)
* and have no effect on workers that are already running.
*
* The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to
* [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv).
*
* The defaults above apply to the first call only; the defaults for later calls are the current values at the time
* `cluster.setupPrimary()` is called.
*
* ```js
* import cluster from 'node:cluster';
*
* cluster.setupPrimary({
* exec: 'worker.js',
* args: ['--use', 'https'],
* silent: true,
* });
* cluster.fork(); // https worker
* cluster.setupPrimary({
* exec: 'worker.js',
* args: ['--use', 'http'],
* });
* cluster.fork(); // http worker
* ```
*
* This can only be called from the primary process.
* @since v16.0.0
*/
setupPrimary(settings?: ClusterSettings): void;
/**
* A reference to the current worker object. Not available in the primary process.
*
* ```js
* import cluster from 'node:cluster';
*
* if (cluster.isPrimary) {
* console.log('I am primary');
* cluster.fork();
* cluster.fork();
* } else if (cluster.isWorker) {
* console.log(`I am worker #${cluster.worker.id}`);
* }
* ```
* @since v0.7.0
*/
readonly worker?: Worker | undefined;
/**
* A hash that stores the active worker objects, keyed by the `id` field. This makes it easy to loop through all the workers. It is only available in the primary process.
*
* A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it
* is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted.
*
* ```js
* import cluster from 'node:cluster';
*
* for (const worker of Object.values(cluster.workers)) {
* worker.send('big announcement to all workers');
* }
* ```
* @since v0.7.0
*/
readonly workers?: NodeJS.Dict<Worker> | undefined;
readonly SCHED_NONE: number;
readonly SCHED_RR: number;
/**
* events.EventEmitter
* 1. disconnect
* 2. exit
* 3. fork
* 4. listening
* 5. message
* 6. online
* 7. setup
*/
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: "disconnect", listener: (worker: Worker) => void): this;
addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
addListener(event: "fork", listener: (worker: Worker) => void): this;
addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
addListener(
event: "message",
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
): this; // the handle is a net.Socket or net.Server object, or undefined.
addListener(event: "online", listener: (worker: Worker) => void): this;
addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
emit(event: string | symbol, ...args: any[]): boolean;
emit(event: "disconnect", worker: Worker): boolean;
emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
emit(event: "fork", worker: Worker): boolean;
emit(event: "listening", worker: Worker, address: Address): boolean;
emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
emit(event: "online", worker: Worker): boolean;
emit(event: "setup", settings: ClusterSettings): boolean;
on(event: string, listener: (...args: any[]) => void): this;
on(event: "disconnect", listener: (worker: Worker) => void): this;
on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
on(event: "fork", listener: (worker: Worker) => void): this;
on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
on(event: "online", listener: (worker: Worker) => void): this;
on(event: "setup", listener: (settings: ClusterSettings) => void): this;
once(event: string, listener: (...args: any[]) => void): this;
once(event: "disconnect", listener: (worker: Worker) => void): this;
once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
once(event: "fork", listener: (worker: Worker) => void): this;
once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: "online", listener: (worker: Worker) => void): this;
once(event: "setup", listener: (settings: ClusterSettings) => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
prependListener(event: "fork", listener: (worker: Worker) => void): this;
prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
// the handle is a net.Socket or net.Server object, or undefined.
prependListener(
event: "message",
listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void,
): this;
prependListener(event: "online", listener: (worker: Worker) => void): this;
prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
// the handle is a net.Socket or net.Server object, or undefined.
prependOnceListener(
event: "message",
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
): this;
prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
}
const cluster: Cluster;
export default cluster;
}
declare module "node:cluster" {
export * from "cluster";
export { default as default } from "cluster";
}

16
node_modules/@types/node/compatibility/disposable.d.ts generated vendored Normal file
View File

@ -0,0 +1,16 @@
// Polyfills for the explicit resource management types added in TypeScript 5.2.
// TODO: remove once this package no longer supports TS 5.1, and replace with a
// <reference> to TypeScript's disposable library in index.d.ts.
interface SymbolConstructor {
readonly dispose: unique symbol;
readonly asyncDispose: unique symbol;
}
interface Disposable {
[Symbol.dispose](): void;
}
interface AsyncDisposable {
[Symbol.asyncDispose](): PromiseLike<void>;
}
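/*
 * A minimal sketch of what these polyfilled types enable, assuming a
 * TypeScript 5.2+ compiler with `using` declarations (the TempFile class is
 * hypothetical, chosen only for illustration):
 *
 * ```ts
 * class TempFile implements Disposable {
 *   [Symbol.dispose](): void {
 *     // release the underlying file handle here
 *   }
 * }
 *
 * {
 *   using tmp = new TempFile(); // [Symbol.dispose]() runs when this block exits
 * }
 * ```
 */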

Some files were not shown because too many files have changed in this diff.