main repo

Author: Basilosaurusrex
Date: 2025-11-24 18:09:40 +01:00
parent b636ee5e70
commit f027651f9b

34146 changed files with 4436636 additions and 0 deletions


@@ -0,0 +1,279 @@
// @ts-ignore
import nodeFetch from '@supabase/node-fetch'
import type {
Fetch,
PostgrestSingleResponse,
PostgrestResponseSuccess,
CheckMatchingArrayTypes,
MergePartialResult,
IsValidResultOverride,
} from './types'
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
export default abstract class PostgrestBuilder<Result, ThrowOnError extends boolean = false>
implements
PromiseLike<
ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>
>
{
protected method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE'
protected url: URL
protected headers: Record<string, string>
protected schema?: string
protected body?: unknown
protected shouldThrowOnError = false
protected signal?: AbortSignal
protected fetch: Fetch
protected isMaybeSingle: boolean
constructor(builder: PostgrestBuilder<Result>) {
this.method = builder.method
this.url = builder.url
this.headers = builder.headers
this.schema = builder.schema
this.body = builder.body
this.shouldThrowOnError = builder.shouldThrowOnError
this.signal = builder.signal
this.isMaybeSingle = builder.isMaybeSingle
if (builder.fetch) {
this.fetch = builder.fetch
} else if (typeof fetch === 'undefined') {
this.fetch = nodeFetch
} else {
this.fetch = fetch
}
}
/**
* If there's an error with the query, throwOnError will reject the promise by
* throwing the error instead of returning it as part of a successful response.
*
* {@link https://github.com/supabase/supabase-js/issues/92}
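*
* @example
* A minimal usage sketch showing the rejection behaviour; the `supabase`
* client and `users` table are assumed placeholders, as in the other examples
* in this package:
* ```typescript
* // On failure the promise rejects with a PostgrestError instead of
* // resolving with `{ data: null, error }`.
* const { data } = await supabase.from('users').select().throwOnError()
* ```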
*/
throwOnError(): this & PostgrestBuilder<Result, true> {
this.shouldThrowOnError = true
return this as this & PostgrestBuilder<Result, true>
}
/**
* Set an HTTP header for the request.
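*
* @example
* A minimal sketch; the `supabase` client, `users` table, and `X-Request-Id`
* header are assumed placeholders:
* ```typescript
* const { data } = await supabase.from('users').select().setHeader('X-Request-Id', 'abc-123')
* ```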
*/
setHeader(name: string, value: string): this {
this.headers = { ...this.headers }
this.headers[name] = value
return this
}
then<
TResult1 = ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>,
TResult2 = never
>(
onfulfilled?:
| ((
value: ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>
) => TResult1 | PromiseLike<TResult1>)
| undefined
| null,
onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null
): PromiseLike<TResult1 | TResult2> {
// https://postgrest.org/en/stable/api.html#switching-schemas
if (this.schema === undefined) {
// skip
} else if (['GET', 'HEAD'].includes(this.method)) {
this.headers['Accept-Profile'] = this.schema
} else {
this.headers['Content-Profile'] = this.schema
}
if (this.method !== 'GET' && this.method !== 'HEAD') {
this.headers['Content-Type'] = 'application/json'
}
// NOTE: Invoke w/o `this` to avoid illegal invocation error.
// https://github.com/supabase/postgrest-js/pull/247
const _fetch = this.fetch
let res = _fetch(this.url.toString(), {
method: this.method,
headers: this.headers,
body: JSON.stringify(this.body),
signal: this.signal,
}).then(async (res) => {
let error = null
let data = null
let count: number | null = null
let status = res.status
let statusText = res.statusText
if (res.ok) {
if (this.method !== 'HEAD') {
const body = await res.text()
if (body === '') {
// Prefer: return=minimal
} else if (this.headers['Accept'] === 'text/csv') {
data = body
} else if (
this.headers['Accept'] &&
this.headers['Accept'].includes('application/vnd.pgrst.plan+text')
) {
data = body
} else {
data = JSON.parse(body)
}
}
const countHeader = this.headers['Prefer']?.match(/count=(exact|planned|estimated)/)
const contentRange = res.headers.get('content-range')?.split('/')
if (countHeader && contentRange && contentRange.length > 1) {
count = parseInt(contentRange[1])
}
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.isMaybeSingle && this.method === 'GET' && Array.isArray(data)) {
if (data.length > 1) {
error = {
// https://github.com/PostgREST/postgrest/blob/a867d79c42419af16c18c3fb019eba8df992626f/src/PostgREST/Error.hs#L553
code: 'PGRST116',
details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
hint: null,
message: 'JSON object requested, multiple (or no) rows returned',
}
data = null
count = null
status = 406
statusText = 'Not Acceptable'
} else if (data.length === 1) {
data = data[0]
} else {
data = null
}
}
} else {
const body = await res.text()
try {
error = JSON.parse(body)
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (Array.isArray(error) && res.status === 404) {
data = []
error = null
status = 200
statusText = 'OK'
}
} catch {
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (res.status === 404 && body === '') {
status = 204
statusText = 'No Content'
} else {
error = {
message: body,
}
}
}
if (error && this.isMaybeSingle && error?.details?.includes('0 rows')) {
error = null
status = 200
statusText = 'OK'
}
if (error && this.shouldThrowOnError) {
throw new PostgrestError(error)
}
}
const postgrestResponse = {
error,
data,
count,
status,
statusText,
}
return postgrestResponse
})
if (!this.shouldThrowOnError) {
res = res.catch((fetchError) => ({
error: {
message: `${fetchError?.name ?? 'FetchError'}: ${fetchError?.message}`,
details: `${fetchError?.stack ?? ''}`,
hint: '',
code: `${fetchError?.code ?? ''}`,
},
data: null,
count: null,
status: 0,
statusText: '',
}))
}
return res.then(onfulfilled, onrejected)
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestBuilder<CheckMatchingArrayTypes<Result, NewResult>, ThrowOnError> {
/* istanbul ignore next */
return this as unknown as PostgrestBuilder<
CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
/**
* Override the type of the returned `data` field in the response.
*
* @typeParam NewResult - The new type to cast the response data to
* @typeParam Options - Optional type configuration (defaults to { merge: true })
* @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
* @example
* ```typescript
* // Merge with existing types (default behavior)
* const query = supabase
* .from('users')
* .select()
* .overrideTypes<{ custom_field: string }>()
*
* // Replace existing types completely
* const replaceQuery = supabase
* .from('users')
* .select()
* .overrideTypes<{ id: number; name: string }, { merge: false }>()
* ```
* @returns A PostgrestBuilder instance with the new type
*/
overrideTypes<
NewResult,
Options extends { merge?: boolean } = { merge: true }
>(): PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
> {
return this as unknown as PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
}


@@ -0,0 +1,181 @@
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import { DEFAULT_HEADERS } from './constants'
import { Fetch, GenericSchema } from './types'
/**
* PostgREST client.
*
* @typeParam Database - Types for the schema from the [type
* generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
*
* @typeParam SchemaName - Postgres schema to switch to. Must be a string
* literal, the same one passed to the constructor. If the schema is not
* `"public"`, this must be supplied manually.
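*
* @example
* A minimal construction sketch; `REST_URL` and the generated `Database` type
* are assumed placeholders:
* ```typescript
* import { PostgrestClient } from '@supabase/postgrest-js'
*
* const postgrest = new PostgrestClient<Database>(REST_URL)
* ```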
*/
export default class PostgrestClient<
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database,
Schema extends GenericSchema = Database[SchemaName] extends GenericSchema
? Database[SchemaName]
: any
> {
url: string
headers: Record<string, string>
schemaName?: SchemaName
fetch?: Fetch
// TODO: Add back shouldThrowOnError once we figure out the typings
/**
* Creates a PostgREST client.
*
* @param url - URL of the PostgREST endpoint
* @param options - Named parameters
* @param options.headers - Custom headers
* @param options.schema - Postgres schema to switch to
* @param options.fetch - Custom fetch
*/
constructor(
url: string,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: SchemaName
fetch?: Fetch
} = {}
) {
this.url = url
this.headers = { ...DEFAULT_HEADERS, ...headers }
this.schemaName = schema
this.fetch = fetch
}
from<
TableName extends string & keyof Schema['Tables'],
Table extends Schema['Tables'][TableName]
>(relation: TableName): PostgrestQueryBuilder<Schema, Table, TableName>
from<ViewName extends string & keyof Schema['Views'], View extends Schema['Views'][ViewName]>(
relation: ViewName
): PostgrestQueryBuilder<Schema, View, ViewName>
/**
* Perform a query on a table or a view.
*
* @param relation - The table or view name to query
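*
* @example
* A minimal sketch; the `postgrest` client (see the constructor example) and
* the `users` table are assumed placeholders:
* ```typescript
* const { data, error } = await postgrest.from('users').select('id, name')
* ```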
*/
from(relation: string): PostgrestQueryBuilder<Schema, any, any> {
const url = new URL(`${this.url}/${relation}`)
return new PostgrestQueryBuilder(url, {
headers: { ...this.headers },
schema: this.schemaName,
fetch: this.fetch,
})
}
/**
* Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
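*
* @example
* A minimal sketch; `other_schema` and its `profiles` table are assumed
* placeholders (the schema must be exposed by PostgREST):
* ```typescript
* const { data } = await postgrest.schema('other_schema').from('profiles').select()
* ```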
*/
schema<DynamicSchema extends string & keyof Database>(
schema: DynamicSchema
): PostgrestClient<
Database,
DynamicSchema,
Database[DynamicSchema] extends GenericSchema ? Database[DynamicSchema] : any
> {
return new PostgrestClient(this.url, {
headers: this.headers,
schema,
fetch: this.fetch,
})
}
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
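*
* @example
* A minimal sketch; the `hello_world` and `add_numbers` functions are assumed
* placeholders:
* ```typescript
* // Function without arguments
* const { data } = await postgrest.rpc('hello_world')
*
* // Scalar function with arguments, called with read-only access mode
* const { data: sum } = await postgrest.rpc('add_numbers', { a: 1, b: 2 }, { get: true })
* ```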
*/
rpc<FnName extends string & keyof Schema['Functions'], Fn extends Schema['Functions'][FnName]>(
fn: FnName,
args: Fn['Args'] = {},
{
head = false,
get = false,
count,
}: {
head?: boolean
get?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<
Schema,
Fn['Returns'] extends any[]
? Fn['Returns'][number] extends Record<string, unknown>
? Fn['Returns'][number]
: never
: never,
Fn['Returns'],
FnName,
null
> {
let method: 'HEAD' | 'GET' | 'POST'
const url = new URL(`${this.url}/rpc/${fn}`)
let body: unknown | undefined
if (head || get) {
method = head ? 'HEAD' : 'GET'
Object.entries(args)
// params with undefined values need to be filtered out, otherwise they'll
// show up as `?param=undefined`
.filter(([_, value]) => value !== undefined)
// array values need special syntax
.map(([name, value]) => [name, Array.isArray(value) ? `{${value.join(',')}}` : `${value}`])
.forEach(([name, value]) => {
url.searchParams.append(name, value)
})
} else {
method = 'POST'
body = args
}
const headers = { ...this.headers }
if (count) {
headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url,
headers,
schema: this.schemaName,
body,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<Fn['Returns']>)
}
}


@@ -0,0 +1,18 @@
/**
* Error format
*
* {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
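*
* @example
* A minimal sketch of catching the error thrown by `throwOnError()`; the
* `supabase` client and `users` table are assumed placeholders:
* ```typescript
* try {
*   await supabase.from('users').select().throwOnError()
* } catch (err) {
*   if (err instanceof PostgrestError) {
*     console.error(err.code, err.message, err.details, err.hint)
*   }
* }
* ```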
*/
export default class PostgrestError extends Error {
details: string
hint: string
code: string
constructor(context: { message: string; details: string; hint: string; code: string }) {
super(context.message)
this.name = 'PostgrestError'
this.details = context.details
this.hint = context.hint
this.code = context.code
}
}


@@ -0,0 +1,592 @@
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import { JsonPathToAccessor, JsonPathToType } from './select-query-parser/utils'
import { GenericSchema } from './types'
type FilterOperator =
| 'eq'
| 'neq'
| 'gt'
| 'gte'
| 'lt'
| 'lte'
| 'like'
| 'ilike'
| 'is'
| 'in'
| 'cs'
| 'cd'
| 'sl'
| 'sr'
| 'nxl'
| 'nxr'
| 'adj'
| 'ov'
| 'fts'
| 'plfts'
| 'phfts'
| 'wfts'
export type IsStringOperator<Path extends string> = Path extends `${string}->>${string}`
? true
: false
// Match relationship filters with `table.column` syntax and resolve underlying
// column value. If not matched, fall back to the generic type.
// TODO: Validate the relationship itself ala select-query-parser. Currently we
// assume that all tables have valid relationships to each other, despite
// nonexistent foreign keys.
type ResolveFilterValue<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
ColumnName extends string
> = ColumnName extends `${infer RelationshipTable}.${infer Remainder}`
? Remainder extends `${infer _}.${infer _}`
? ResolveFilterValue<Schema, Row, Remainder>
: ResolveFilterRelationshipValue<Schema, RelationshipTable, Remainder>
: ColumnName extends keyof Row
? Row[ColumnName]
: // If the column selection is a jsonpath like `data->value` or `data->>value` we attempt to match
// the expected type with the parsed custom json type
IsStringOperator<ColumnName> extends true
? string
: JsonPathToType<Row, JsonPathToAccessor<ColumnName>> extends infer JsonPathValue
? JsonPathValue extends never
? never
: JsonPathValue
: never
type ResolveFilterRelationshipValue<
Schema extends GenericSchema,
RelationshipTable extends string,
RelationshipColumn extends string
> = Schema['Tables'] & Schema['Views'] extends infer TablesAndViews
? RelationshipTable extends keyof TablesAndViews
? 'Row' extends keyof TablesAndViews[RelationshipTable]
? RelationshipColumn extends keyof TablesAndViews[RelationshipTable]['Row']
? TablesAndViews[RelationshipTable]['Row'][RelationshipColumn]
: unknown
: unknown
: unknown
: never
export default class PostgrestFilterBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestTransformBuilder<Schema, Row, Result, RelationName, Relationships> {
/**
* Match only rows where `column` is equal to `value`.
*
* To check if the value of `column` is NULL, you should use `.is()` instead.
*
* @param column - The column to filter on
* @param value - The value to filter with
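*
* @example
* A minimal sketch; the `users` table and its `status` column are assumed
* placeholders:
* ```typescript
* const { data } = await supabase.from('users').select().eq('status', 'active')
* ```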
*/
eq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? NonNullable<unknown>
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? NonNullable<ResolvedFilterValue>
: // We should never enter this case as all the branches are covered above
never
): this {
this.url.searchParams.append(column, `eq.${value}`)
return this
}
/**
* Match only rows where `column` is not equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
neq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: never
): this {
this.url.searchParams.append(column, `neq.${value}`)
return this
}
gt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gt(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gt(column: string, value: unknown): this {
this.url.searchParams.append(column, `gt.${value}`)
return this
}
gte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gte(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gte(column: string, value: unknown): this {
this.url.searchParams.append(column, `gte.${value}`)
return this
}
lt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lt(column: string, value: unknown): this
/**
* Match only rows where `column` is less than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lt(column: string, value: unknown): this {
this.url.searchParams.append(column, `lt.${value}`)
return this
}
lte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lte(column: string, value: unknown): this
/**
* Match only rows where `column` is less than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lte(column: string, value: unknown): this {
this.url.searchParams.append(column, `lte.${value}`)
return this
}
like<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
like(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-sensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
like(column: string, pattern: string): this {
this.url.searchParams.append(column, `like.${pattern}`)
return this
}
likeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(all).{${patterns.join(',')}}`)
return this
}
likeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(any).{${patterns.join(',')}}`)
return this
}
ilike<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
ilike(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-insensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
ilike(column: string, pattern: string): this {
this.url.searchParams.append(column, `ilike.${pattern}`)
return this
}
ilikeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(all).{${patterns.join(',')}}`)
return this
}
ilikeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(any).{${patterns.join(',')}}`)
return this
}
is<ColumnName extends string & keyof Row>(
column: ColumnName,
value: Row[ColumnName] & (boolean | null)
): this
is(column: string, value: boolean | null): this
/**
* Match only rows where `column` IS `value`.
*
* For non-boolean columns, this is only relevant for checking if the value of
* `column` is NULL by setting `value` to `null`.
*
* For boolean columns, you can also set `value` to `true` or `false` and it
* will behave the same way as `.eq()`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
is(column: string, value: boolean | null): this {
this.url.searchParams.append(column, `is.${value}`)
return this
}
/**
* Match only rows where `column` is included in the `values` array.
*
* @param column - The column to filter on
* @param values - The values array to filter with
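*
* @example
* A minimal sketch; the `users` table and its `status` column are assumed
* placeholders:
* ```typescript
* const { data } = await supabase.from('users').select().in('status', ['active', 'invited'])
* ```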
*/
in<ColumnName extends string>(
column: ColumnName,
values: ReadonlyArray<
ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: // We should never enter this case as all the branches are covered above
never
>
): this {
const cleanedValues = Array.from(new Set(values))
.map((s) => {
// handle postgrest reserved characters
// https://postgrest.org/en/v7.0.0/api.html#reserved-characters
if (typeof s === 'string' && new RegExp('[,()]').test(s)) return `"${s}"`
else return `${s}`
})
.join(',')
this.url.searchParams.append(column, `in.(${cleanedValues})`)
return this
}
contains<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* `column` contains every element appearing in `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
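*
* @example
* A minimal sketch; the `issues` table with a `tags` array column and a
* `metadata` jsonb column are assumed placeholders:
* ```typescript
* // Array containment: rows whose tags include both values
* const { data } = await supabase.from('issues').select().contains('tags', ['bug', 'urgent'])
*
* // JSONB containment
* const { data: flagged } = await supabase.from('issues').select().contains('metadata', { flagged: true })
* ```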
*/
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range types can be inclusive '[', ']' or exclusive '(', ')' so just
// keep it simple and accept a string
this.url.searchParams.append(column, `cs.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cs.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cs.${JSON.stringify(value)}`)
}
return this
}
containedBy<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* every element appearing in `column` is contained by `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `cd.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cd.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cd.${JSON.stringify(value)}`)
}
return this
}
rangeGt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is greater than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGt(column: string, range: string): this {
this.url.searchParams.append(column, `sr.${range}`)
return this
}
rangeGte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or greater than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGte(column: string, range: string): this {
this.url.searchParams.append(column, `nxl.${range}`)
return this
}
rangeLt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is less than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLt(column: string, range: string): this {
this.url.searchParams.append(column, `sl.${range}`)
return this
}
rangeLte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or less than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLte(column: string, range: string): this {
this.url.searchParams.append(column, `nxr.${range}`)
return this
}
rangeAdjacent<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeAdjacent(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where `column` is
* mutually exclusive to `range` and there can be no element between the two
* ranges.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeAdjacent(column: string, range: string): this {
this.url.searchParams.append(column, `adj.${range}`)
return this
}
overlaps<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]>
): this
overlaps(column: string, value: string | readonly unknown[]): this
/**
* Only relevant for array and range columns. Match only rows where
* `column` and `value` have an element in common.
*
* @param column - The array or range column to filter on
* @param value - The array or range value to filter with
*/
overlaps(column: string, value: string | readonly unknown[]): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `ov.${value}`)
} else {
// array
this.url.searchParams.append(column, `ov.{${value.join(',')}}`)
}
return this
}
textSearch<ColumnName extends string & keyof Row>(
column: ColumnName,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
textSearch(
column: string,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
/**
* Only relevant for text and tsvector columns. Match only rows where
* `column` matches the query string in `query`.
*
* @param column - The text or tsvector column to filter on
* @param query - The query text to match with
* @param options - Named parameters
* @param options.config - The text search configuration to use
* @param options.type - Change how the `query` text is interpreted
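*
* @example
* A minimal sketch; the `books` table and its `description` column are assumed
* placeholders:
* ```typescript
* const { data } = await supabase
* .from('books')
* .select()
* .textSearch('description', 'science fiction', { type: 'websearch', config: 'english' })
* ```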
*/
textSearch(
column: string,
query: string,
{ config, type }: { config?: string; type?: 'plain' | 'phrase' | 'websearch' } = {}
): this {
let typePart = ''
if (type === 'plain') {
typePart = 'pl'
} else if (type === 'phrase') {
typePart = 'ph'
} else if (type === 'websearch') {
typePart = 'w'
}
const configPart = config === undefined ? '' : `(${config})`
this.url.searchParams.append(column, `${typePart}fts${configPart}.${query}`)
return this
}
match<ColumnName extends string & keyof Row>(query: Record<ColumnName, Row[ColumnName]>): this
match(query: Record<string, unknown>): this
/**
* Match only rows where each column in `query` keys is equal to its
* associated value. Shorthand for multiple `.eq()`s.
*
* @param query - The object to filter with, with column names as keys mapped
* to their filter values
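*
* @example
* A minimal sketch; the `users` table and its columns are assumed placeholders:
* ```typescript
* // Equivalent to .eq('status', 'active').eq('role', 'admin')
* const { data } = await supabase.from('users').select().match({ status: 'active', role: 'admin' })
* ```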
*/
match(query: Record<string, unknown>): this {
Object.entries(query).forEach(([column, value]) => {
this.url.searchParams.append(column, `eq.${value}`)
})
return this
}
not<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: FilterOperator,
value: Row[ColumnName]
): this
not(column: string, operator: string, value: unknown): this
/**
* Match only rows which don't satisfy the filter.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to be negated to filter with, following
* PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
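*
* @example
* A minimal sketch; the `users` table and its `status` column are assumed
* placeholders:
* ```typescript
* // Rows where status is not equal to 'banned'
* const { data } = await supabase.from('users').select().not('status', 'eq', 'banned')
* ```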
*/
not(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `not.${operator}.${value}`)
return this
}
/**
* Match only rows which satisfy at least one of the filters.
*
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure it's properly sanitized.
*
* It's currently not possible to do an `.or()` filter across multiple tables.
*
* @param filters - The filters to use, following PostgREST syntax
* @param options - Named parameters
* @param options.referencedTable - Set this to filter on referenced tables
* instead of the parent table
* @param options.foreignTable - Deprecated, use `referencedTable` instead
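*
* @example
* A minimal sketch; the `users` table and its columns are assumed placeholders:
* ```typescript
* const { data } = await supabase
* .from('users')
* .select()
* .or('status.eq.active,role.eq.admin')
* ```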
*/
or(
filters: string,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = referencedTable ? `${referencedTable}.or` : 'or'
this.url.searchParams.append(key, `(${filters})`)
return this
}
filter<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: `${'' | 'not.'}${FilterOperator}`,
value: unknown
): this
filter(column: string, operator: string, value: unknown): this
/**
* Match only rows which satisfy the filter. This is an escape hatch - you
* should use the specific filter methods wherever possible.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to filter with, following PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
filter(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `${operator}.${value}`)
return this
}
}


@@ -0,0 +1,381 @@
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import { GetResult } from './select-query-parser/result'
import { Fetch, GenericSchema, GenericTable, GenericView } from './types'
export default class PostgrestQueryBuilder<
Schema extends GenericSchema,
Relation extends GenericTable | GenericView,
RelationName = unknown,
Relationships = Relation extends { Relationships: infer R } ? R : unknown
> {
url: URL
headers: Record<string, string>
schema?: string
signal?: AbortSignal
fetch?: Fetch
constructor(
url: URL,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: string
fetch?: Fetch
}
) {
this.url = url
this.headers = headers
this.schema = schema
this.fetch = fetch
}
/**
* Perform a SELECT query on the table or view.
*
* @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName`
*
* @param options - Named parameters
*
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
*
* @param options.count - Count algorithm to use to count rows in the table or view.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
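*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* // Fetch rows and an exact total count in one round trip
* const { data, count } = await supabase.from('users').select('id, name', { count: 'exact' })
* ```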
*/
select<
Query extends string = '*',
ResultOne = GetResult<Schema, Relation['Row'], RelationName, Relationships, Query>
>(
columns?: Query,
{
head = false,
count,
}: {
head?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], ResultOne[], RelationName, Relationships> {
const method = head ? 'HEAD' : 'GET'
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
if (count) {
this.headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<ResultOne[]>)
}
// TODO(v3): Make `defaultToNull` consistent for both single & bulk inserts.
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an INSERT into the table or view.
*
* By default, inserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to insert. Pass an object to insert a single row
* or an array to insert multiple rows.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count inserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. Only applies for bulk
* inserts.
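*
* @example
* A minimal sketch; the `users` table and its columns are assumed placeholders:
* ```typescript
* // Single row; chain .select() to get the inserted row back
* const { data } = await supabase.from('users').insert({ name: 'Ada' }).select()
*
* // Bulk insert, letting missing fields fall back to column defaults
* await supabase.from('users').insert([{ name: 'Ada' }, { name: 'Grace' }], { defaultToNull: false })
* ```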
*/
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
count,
defaultToNull = true,
}: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
// TODO(v3): Make `defaultToNull` consistent for both single & bulk upserts.
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an UPSERT on the table or view. Depending on the column(s) passed
* to `onConflict`, `.upsert()` allows you to perform the equivalent of
* `.insert()` if a row with the corresponding `onConflict` columns doesn't
* exist, or if it does exist, perform an alternative action depending on
* `ignoreDuplicates`.
*
* By default, upserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to upsert with. Pass an object to upsert a
* single row or an array to upsert multiple rows.
*
* @param options - Named parameters
*
* @param options.onConflict - Comma-separated UNIQUE column(s) to specify how
* duplicate rows are determined. Two rows are duplicates if all the
* `onConflict` columns are equal.
*
* @param options.ignoreDuplicates - If `true`, duplicate rows are ignored. If
* `false`, duplicate rows are merged with existing rows.
*
* @param options.count - Count algorithm to use to count upserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. This only applies when
* inserting new rows, not when merging with existing rows under
* `ignoreDuplicates: false`. This also only applies when doing bulk upserts.
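*
* @example
* A minimal sketch; the `users` table and its unique `handle` column are
* assumed placeholders:
* ```typescript
* const { data } = await supabase
* .from('users')
* .upsert({ handle: 'ada', name: 'Ada' }, { onConflict: 'handle' })
* .select()
* ```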
*/
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
onConflict,
ignoreDuplicates = false,
count,
defaultToNull = true,
}: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = [`resolution=${ignoreDuplicates ? 'ignore' : 'merge'}-duplicates`]
if (onConflict !== undefined) this.url.searchParams.set('on_conflict', onConflict)
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
/**
* Perform an UPDATE on the table or view.
*
* By default, updated rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param values - The values to update with
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count updated rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
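*
* @example
* A minimal sketch; the `users` table and its columns are assumed placeholders:
* ```typescript
* // Combine with filters; an unfiltered UPDATE targets every row
* const { data } = await supabase
* .from('users')
* .update({ status: 'inactive' })
* .eq('id', 1)
* .select()
* ```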
*/
update<Row extends Relation extends { Update: unknown } ? Relation['Update'] : never>(
values: Row,
{
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'PATCH'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
/**
* Perform a DELETE on the table or view.
*
* By default, deleted rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count deleted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
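*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* const { error } = await supabase.from('users').delete().eq('id', 1)
* ```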
*/
delete({
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'DELETE'
const prefersHeaders = []
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (this.headers['Prefer']) {
prefersHeaders.unshift(this.headers['Prefer'])
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
}


@@ -0,0 +1,327 @@
import PostgrestBuilder from './PostgrestBuilder'
import { GetResult } from './select-query-parser/result'
import { GenericSchema, CheckMatchingArrayTypes } from './types'
export default class PostgrestTransformBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestBuilder<Result> {
/**
* Perform a SELECT on the query result.
*
* By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
* return modified rows. By calling this method, modified rows are returned in
* `data`.
*
* @param columns - The columns to retrieve, separated by commas
*/
select<
Query extends string = '*',
NewResultOne = GetResult<Schema, Row, RelationName, Relationships, Query>
>(
columns?: Query
): PostgrestTransformBuilder<Schema, Row, NewResultOne[], RelationName, Relationships> {
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
if (this.headers['Prefer']) {
this.headers['Prefer'] += ','
}
this.headers['Prefer'] += 'return=representation'
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
NewResultOne[],
RelationName,
Relationships
>
}
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: undefined }
): this
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: string }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: undefined }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: string }
): this
/**
* Order the query result by `column`.
*
* You can call this method multiple times to order by multiple columns.
*
* You can order referenced tables, but it only affects the ordering of the
* parent table if you use `!inner` in the query.
*
* @param column - The column to order by
* @param options - Named parameters
* @param options.ascending - If `true`, the result will be in ascending order
* @param options.nullsFirst - If `true`, `null`s appear first. If `false`,
* `null`s appear last.
* @param options.referencedTable - Set this to order a referenced table by
* its columns
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
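*
* @example
* A minimal sketch; the `users` table and its `created_at` column are assumed
* placeholders:
* ```typescript
* const { data } = await supabase
* .from('users')
* .select()
* .order('created_at', { ascending: false, nullsFirst: false })
* ```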
*/
order(
column: string,
{
ascending = true,
nullsFirst,
foreignTable,
referencedTable = foreignTable,
}: {
ascending?: boolean
nullsFirst?: boolean
foreignTable?: string
referencedTable?: string
} = {}
): this {
const key = referencedTable ? `${referencedTable}.order` : 'order'
const existingOrder = this.url.searchParams.get(key)
this.url.searchParams.set(
key,
`${existingOrder ? `${existingOrder},` : ''}${column}.${ascending ? 'asc' : 'desc'}${
nullsFirst === undefined ? '' : nullsFirst ? '.nullsfirst' : '.nullslast'
}`
)
return this
}
/**
* Limit the query result by `count`.
*
* @param count - The maximum number of rows to return
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
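*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* const { data } = await supabase.from('users').select().order('id').limit(10)
* ```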
*/
limit(
count: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(key, `${count}`)
return this
}
/**
* Limit the query result by starting at an offset `from` and ending at the offset `to`.
* Only records within this range are returned.
* This respects the query order; if there is no order clause, the range could behave unexpectedly.
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
* and fourth rows of the query.
*
* @param from - The starting index from which to limit the result
* @param to - The last index to which to limit the result
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
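*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* // Rows 11-20 of the ordered result (0-based, inclusive)
* const { data } = await supabase.from('users').select().order('id').range(10, 19)
* ```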
*/
range(
from: number,
to: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const keyOffset =
typeof referencedTable === 'undefined' ? 'offset' : `${referencedTable}.offset`
const keyLimit = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(keyOffset, `${from}`)
// Range is inclusive, so add 1
this.url.searchParams.set(keyLimit, `${to - from + 1}`)
return this
}
/**
* Set the AbortSignal for the fetch request.
*
* @param signal - The AbortSignal to use for the fetch request
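*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* const controller = new AbortController()
* const pending = supabase.from('users').select().abortSignal(controller.signal)
* controller.abort()
* // Aborting surfaces as an error on the resolved response (status 0)
* // unless throwOnError() is used, in which case the promise rejects.
* const { error } = await pending
* ```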
*/
abortSignal(signal: AbortSignal): this {
this.signal = signal
return this
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be one row (e.g. using `.limit(1)`), otherwise this
* returns an error.
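*
* @example
* A minimal sketch; the `users` table is an assumed placeholder:
* ```typescript
* // Errors unless exactly one row matches
* const { data: user } = await supabase.from('users').select().eq('id', 1).single()
* ```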
*/
single<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne> {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
return this as unknown as PostgrestBuilder<ResultOne>
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
* this returns an error.
*/
maybeSingle<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne | null> {
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.method === 'GET') {
this.headers['Accept'] = 'application/json'
} else {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
}
this.isMaybeSingle = true
return this as unknown as PostgrestBuilder<ResultOne | null>
}
/**
* Return `data` as a string in CSV format.
*/
csv(): PostgrestBuilder<string> {
this.headers['Accept'] = 'text/csv'
return this as unknown as PostgrestBuilder<string>
}
/**
* Return `data` as an object in [GeoJSON](https://geojson.org) format.
*/
geojson(): PostgrestBuilder<Record<string, unknown>> {
this.headers['Accept'] = 'application/geo+json'
return this as unknown as PostgrestBuilder<Record<string, unknown>>
}
/**
* Return `data` as the EXPLAIN plan for the query.
*
* You need to enable the
* [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
* setting before using this method.
*
* @param options - Named parameters
*
* @param options.analyze - If `true`, the query will be executed and the
* actual run time will be returned
*
* @param options.verbose - If `true`, the query identifier will be returned
* and `data` will include the output columns of the query
*
* @param options.settings - If `true`, include information on configuration
* parameters that affect query planning
*
* @param options.buffers - If `true`, include information on buffer usage
*
* @param options.wal - If `true`, include information on WAL record generation
*
* @param options.format - The format of the output, can be `"text"` (default)
* or `"json"`
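*
* @example
* A minimal sketch; the `users` table is an assumed placeholder, and the
* `db_plan_enabled` setting must already be enabled:
* ```typescript
* const { data: plan } = await supabase
* .from('users')
* .select()
* .explain({ analyze: true, format: 'json' })
* ```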
*/
explain({
analyze = false,
verbose = false,
settings = false,
buffers = false,
wal = false,
format = 'text',
}: {
analyze?: boolean
verbose?: boolean
settings?: boolean
buffers?: boolean
wal?: boolean
format?: 'json' | 'text'
} = {}): PostgrestBuilder<Record<string, unknown>[]> | PostgrestBuilder<string> {
const options = [
analyze ? 'analyze' : null,
verbose ? 'verbose' : null,
settings ? 'settings' : null,
buffers ? 'buffers' : null,
wal ? 'wal' : null,
]
.filter(Boolean)
.join('|')
// An Accept header can carry multiple media types but postgrest-js always sends one
const forMediatype = this.headers['Accept'] ?? 'application/json'
this.headers[
'Accept'
] = `application/vnd.pgrst.plan+${format}; for="${forMediatype}"; options=${options};`
if (format === 'json') return this as unknown as PostgrestBuilder<Record<string, unknown>[]>
else return this as unknown as PostgrestBuilder<string>
}
/**
* Rollback the query.
*
* `data` will still be returned, but the query is not committed.
*/
rollback(): this {
if ((this.headers['Prefer'] ?? '').trim().length > 0) {
this.headers['Prefer'] += ',tx=rollback'
} else {
this.headers['Prefer'] = 'tx=rollback'
}
return this
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
> {
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
>
}
}

node_modules/@supabase/postgrest-js/src/constants.ts

@@ -0,0 +1,2 @@
import { version } from './version'
export const DEFAULT_HEADERS = { 'X-Client-Info': `postgrest-js/${version}` }

node_modules/@supabase/postgrest-js/src/index.ts

@@ -0,0 +1,34 @@
// Always update wrapper.mjs when updating this file.
import PostgrestClient from './PostgrestClient'
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestError from './PostgrestError'
export {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export default {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export type {
PostgrestResponse,
PostgrestResponseFailure,
PostgrestResponseSuccess,
PostgrestSingleResponse,
PostgrestMaybeSingleResponse,
} from './types'
// https://github.com/supabase/postgrest-js/issues/551
// To be replaced with a helper type that only uses public types
export type { GetResult as UnstableGetResult } from './select-query-parser/result'


@@ -0,0 +1,469 @@
// Credits to @bnjmnt4n (https://www.npmjs.com/package/postgrest-query)
// See https://github.com/PostgREST/postgrest/blob/2f91853cb1de18944a4556df09e52450b881cfb3/src/PostgREST/ApiRequest/QueryParams.hs#L282-L284
import { SimplifyDeep } from '../types'
import { JsonPathToAccessor } from './utils'
/**
* Parses a query.
* A query is a sequence of nodes, separated by `,`, ensuring that there is
* no remaining input after all nodes have been parsed.
*
* Returns an array of parsed nodes, or an error.
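*
* @example
* A type-level sketch; the query string and the rough node shapes shown in the
* comment are illustrative only:
* ```typescript
* // Resolves to roughly: [{ type: 'field'; name: 'id' }, { type: 'field'; name: 'posts'; children: [...] }]
* type Parsed = ParseQuery<'id, posts(title)'>
* ```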
*/
export type ParseQuery<Query extends string> = string extends Query
? GenericStringError
: ParseNodes<EatWhitespace<Query>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends ''
? SimplifyDeep<Nodes>
: ParserError<`Unexpected input: ${Remainder}`>
: ParserError<'Invalid nodes array structure'>
: ParseNodes<EatWhitespace<Query>>
/**
* Notes: all `Parse*` types assume that their input strings have their whitespace
* removed. They return tuples of ["Return Value", "Remainder of text"] or
* a `ParserError`.
*/
/**
* Parses a sequence of nodes, separated by `,`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"] or an error.
*/
type ParseNodes<Input extends string> = string extends Input
? GenericStringError
: ParseNodesHelper<Input, []>
type ParseNodesHelper<Input extends string, Nodes extends Ast.Node[]> = ParseNode<Input> extends [
infer Node,
`${infer Remainder}`
]
? Node extends Ast.Node
? EatWhitespace<Remainder> extends `,${infer Remainder}`
? ParseNodesHelper<EatWhitespace<Remainder>, [...Nodes, Node]>
: [[...Nodes, Node], EatWhitespace<Remainder>]
: ParserError<'Invalid node type in nodes helper'>
: ParseNode<Input>
/**
* Parses a node.
* A node is one of the following:
* - `*`
* - a field, as defined above
* - a renamed field, `renamed_field:field`
* - a spread field, `...field`
*/
type ParseNode<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: // `*`
Input extends `*${infer Remainder}`
? [Ast.StarNode, EatWhitespace<Remainder>]
: // `...field`
Input extends `...${infer Remainder}`
? ParseField<EatWhitespace<Remainder>> extends [infer TargetField, `${infer Remainder}`]
? TargetField extends Ast.FieldNode
? [{ type: 'spread'; target: TargetField }, EatWhitespace<Remainder>]
: ParserError<'Invalid target field type in spread'>
: ParserError<`Unable to parse spread resource at \`${Input}\``>
: ParseIdentifier<Input> extends [infer NameOrAlias, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `::${infer _}`
? // It's a type cast and not an alias, so treat it as part of the field.
ParseField<Input>
: EatWhitespace<Remainder> extends `:${infer Remainder}`
? // `alias:`
ParseField<EatWhitespace<Remainder>> extends [infer Field, `${infer Remainder}`]
? Field extends Ast.FieldNode
? [Omit<Field, 'alias'> & { alias: NameOrAlias }, EatWhitespace<Remainder>]
: ParserError<'Invalid field type in alias parsing'>
: ParserError<`Unable to parse renamed field at \`${Input}\``>
: // Otherwise, just parse it as a field without alias.
ParseField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
/**
* Parses a field without preceding alias.
* A field is one of the following:
* - a top-level `count` field: https://docs.postgrest.org/en/v12/references/api/aggregate_functions.html#the-case-of-count
* - a field with an embedded resource
* - `field(nodes)`
* - `field!hint(nodes)`
* - `field!inner(nodes)`
* - `field!left(nodes)`
* - `field!hint!inner(nodes)`
* - `field!hint!left(nodes)`
* - a field without an embedded resource (see {@link ParseNonEmbeddedResourceField})
*/
type ParseField<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: ParseIdentifier<Input> extends [infer Name, `${infer Remainder}`]
? Name extends 'count'
? ParseCountField<Input>
: Remainder extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!inner(nodes)`
[{ type: 'field'; name: Name; innerJoin: true; children: Children }, Remainder]
: ParserError<'Invalid children array in inner join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!inner" at \`${Remainder}\``
>
: EatWhitespace<Remainder> extends `!left${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!left(nodes)`
// !left is a noise word - treat it the same way as a non-`!inner`.
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in left join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!left" at \`${EatWhitespace<Remainder>}\``
>
: EatWhitespace<Remainder> extends `!${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [infer Hint, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint!inner(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; innerJoin: true; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint inner join'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParserError<`Expected identifier after "!" at \`${EatWhitespace<Remainder>}\``>
: EatWhitespace<Remainder> extends `(${infer _}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field(nodes)`
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in field'>
: // Return error if start of embedded resource was detected but not found.
ParseEmbeddedResource<EatWhitespace<Remainder>>
: // Otherwise it's a non-embedded resource field.
ParseNonEmbeddedResourceField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
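// Illustrative usage (a sketch, not from the original source): `!inner` and hints are parsed
// into `innerJoin` and `hint` properties on the resulting field node. Expected shapes are approximate.
type ExampleInnerJoinQuery = ParseQuery<'users!inner(name)'>
// ExampleInnerJoinQuery ≈ [{ type: 'field'; name: 'users'; innerJoin: true; children: [{ type: 'field'; name: 'name' }] }]
type ExampleHintedQuery = ParseQuery<'messages!sender_id(content)'>
// ExampleHintedQuery ≈ [{ type: 'field'; name: 'messages'; hint: 'sender_id'; children: [{ type: 'field'; name: 'content' }] }]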
type ParseCountField<Input extends string> = ParseIdentifier<Input> extends [
'count',
`${infer Remainder}`
]
? (
EatWhitespace<Remainder> extends `()${infer Remainder_}`
? EatWhitespace<Remainder_>
: EatWhitespace<Remainder>
) extends `${infer Remainder}`
? Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
{ type: 'field'; name: 'count'; aggregateFunction: 'count'; castType: CastType },
Remainder
]
: ParseFieldTypeCast<Remainder>
: [{ type: 'field'; name: 'count'; aggregateFunction: 'count' }, Remainder]
: never
: ParserError<`Expected "count" at \`${Input}\``>
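// Illustrative usage (a sketch, not from the original source): a top-level `count()` becomes a
// field named `count` with `aggregateFunction: 'count'`; an optional `::` cast is carried along.
type ExampleCountQuery = ParseQuery<'count()'>
// ExampleCountQuery ≈ [{ type: 'field'; name: 'count'; aggregateFunction: 'count' }]
type ExampleCastCountQuery = ParseQuery<'count()::int8'>
// ExampleCastCountQuery ≈ [{ type: 'field'; name: 'count'; aggregateFunction: 'count'; castType: 'int8' }]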
/**
* Parses an embedded resource, which is an opening `(`, followed by a sequence of
* 0 or more nodes separated by `,`, then a closing `)`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"], an error,
* or the original string input indicating that no opening `(` was found.
*/
type ParseEmbeddedResource<Input extends string> = Input extends `(${infer Remainder}`
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [[], EatWhitespace<Remainder>]
: ParseNodes<EatWhitespace<Remainder>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [Nodes, EatWhitespace<Remainder>]
: ParserError<`Expected ")" at \`${EatWhitespace<Remainder>}\``>
: ParserError<'Invalid nodes array in embedded resource'>
: ParseNodes<EatWhitespace<Remainder>>
: ParserError<`Expected "(" at \`${Input}\``>
/**
* Parses a field excluding embedded resources, without preceding field renaming.
* This is one of the following:
* - `field`
* - `field.aggregate()`
* - `field.aggregate()::type`
* - `field::type`
* - `field::type.aggregate()`
* - `field::type.aggregate()::type`
* - `field->json...`
* - `field->json.aggregate()`
* - `field->json.aggregate()::type`
* - `field->json::type`
* - `field->json::type.aggregate()`
* - `field->json::type.aggregate()::type`
*/
type ParseNonEmbeddedResourceField<Input extends string> = ParseIdentifier<Input> extends [
infer Name,
`${infer Remainder}`
]
? // Parse optional JSON path.
(
Remainder extends `->${infer PathAndRest}`
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [
{
type: 'field'
name: Name
alias: PropertyName
castType: PropertyType
jsonPath: JsonPathToAccessor<
PathAndRest extends `${infer Path},${string}` ? Path : PathAndRest
>
},
Remainder
]
: ParseJsonAccessor<Remainder>
: [{ type: 'field'; name: Name }, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional typecast or aggregate function input typecast.
(
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [Omit<Field, 'castType'> & { castType: CastType }, Remainder]
: ParseFieldTypeCast<Remainder>
: [Field, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional aggregate function.
Remainder extends `.${infer _}`
? ParseFieldAggregation<Remainder> extends [
infer AggregateFunction,
`${infer Remainder}`
]
? // Parse optional aggregate function output typecast.
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
Omit<Field, 'castType'> & {
aggregateFunction: AggregateFunction
castType: CastType
},
Remainder
]
: ParseFieldTypeCast<Remainder>
: [Field & { aggregateFunction: AggregateFunction }, Remainder]
: ParseFieldAggregation<Remainder>
: [Field, Remainder]
: Parsed
: never
: Parsed
: never
: ParserError<`Expected identifier at \`${Input}\``>
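// Illustrative usage (a sketch, not from the original source): aggregates, casts and JSON paths
// all end up as extra properties on the field node. Expected shapes are approximate.
type ExampleAggregatedColumn = ParseQuery<'amount.sum()::float8'>
// ExampleAggregatedColumn ≈ [{ type: 'field'; name: 'amount'; aggregateFunction: 'sum'; castType: 'float8' }]
type ExampleJsonColumn = ParseQuery<'data->settings->>theme'>
// ExampleJsonColumn ≈ [{ type: 'field'; name: 'data'; alias: 'theme'; castType: 'text'; jsonPath: 'settings.theme' }]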
/**
* Parses a JSON property accessor of the shape `->a->b->c`. The last accessor in
* the series may convert to text by using the ->> operator instead of ->.
*
* Returns a tuple of ["Last property name", "Last property type", "Remainder of text"]
*/
type ParseJsonAccessor<Input extends string> = Input extends `->${infer Remainder}`
? Remainder extends `>${infer Remainder}`
? ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? [Name, 'text', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->>`'>
: ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [PropertyName, PropertyType, EatWhitespace<Remainder>]
: [Name, 'json', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->`'>
: ParserError<'Expected ->'>
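// Illustrative usage (a sketch, not from the original source): the tuple carries the last
// property name, its inferred type ('text' for ->>, 'json' for ->), and the unparsed remainder.
type ExampleJsonAccessorText = ParseJsonAccessor<'->meta->>color'>
// ExampleJsonAccessorText ≈ ['color', 'text', '']
type ExampleJsonAccessorJson = ParseJsonAccessor<'->meta->tags'>
// ExampleJsonAccessorJson ≈ ['tags', 'json', '']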
/**
* Parses a field typecast (`::type`), returning a tuple of ["Type", "Remainder of text"].
*/
type ParseFieldTypeCast<Input extends string> = EatWhitespace<Input> extends `::${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [`${infer CastType}`, `${infer Remainder}`]
? [CastType, EatWhitespace<Remainder>]
: ParserError<`Invalid type for \`::\` operator at \`${Remainder}\``>
: ParserError<'Expected ::'>
/**
* Parses a field aggregation (`.max()`), returning a tuple of ["Aggregate function", "Remainder of text"]
*/
type ParseFieldAggregation<Input extends string> =
EatWhitespace<Input> extends `.${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [
`${infer FunctionName}`,
`${infer Remainder}`
]
? // Ensure that aggregation function is valid.
FunctionName extends Token.AggregateFunction
? EatWhitespace<Remainder> extends `()${infer Remainder}`
? [FunctionName, EatWhitespace<Remainder>]
: ParserError<`Expected \`()\` after \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator at \`${Remainder}\``>
: ParserError<'Expected .'>
/**
* Parses a (possibly double-quoted) identifier.
* Identifiers are sequences of 1 or more letters.
*/
type ParseIdentifier<Input extends string> = ParseLetters<Input> extends [
infer Name,
`${infer Remainder}`
]
? [Name, EatWhitespace<Remainder>]
: ParseQuotedLetters<Input> extends [infer Name, `${infer Remainder}`]
? [Name, EatWhitespace<Remainder>]
: ParserError<`No (possibly double-quoted) identifier at \`${Input}\``>
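// Illustrative usage (a sketch, not from the original source): plain identifiers stop at the
// first non-letter, while double-quoted identifiers may contain any character except `"`.
type ExamplePlainIdentifier = ParseIdentifier<'user_id::text'>
// ExamplePlainIdentifier ≈ ['user_id', '::text']
type ExampleQuotedIdentifier = ParseIdentifier<'"full name",id'>
// ExampleQuotedIdentifier ≈ ['full name', ',id']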
/**
* Parse a consecutive sequence of 1 or more letters, where letters are `[0-9a-zA-Z_]`.
*/
type ParseLetters<Input extends string> = string extends Input
? GenericStringError
: ParseLettersHelper<Input, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected letter at \`${Input}\``>
: [Letters, Remainder]
: ParseLettersHelper<Input, ''>
type ParseLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends Token.Letter
? ParseLettersHelper<Remainder, `${Acc}${L}`>
: [Acc, Input]
: [Acc, '']
/**
* Parse a consecutive sequence of 1 or more double-quoted letters,
* where letters are `[^"]`.
*/
type ParseQuotedLetters<Input extends string> = string extends Input
? GenericStringError
: Input extends `"${infer Remainder}`
? ParseQuotedLettersHelper<Remainder, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected string at \`${Remainder}\``>
: [Letters, Remainder]
: ParseQuotedLettersHelper<Remainder, ''>
: ParserError<`Not a double-quoted string at \`${Input}\``>
type ParseQuotedLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends '"'
? [Acc, Remainder]
: ParseQuotedLettersHelper<Remainder, `${Acc}${L}`>
: ParserError<`Missing closing double-quote in \`"${Acc}${Input}\``>
/**
* Trims whitespace from the left of the input.
*/
type EatWhitespace<Input extends string> = string extends Input
? GenericStringError
: Input extends `${Token.Whitespace}${infer Remainder}`
? EatWhitespace<Remainder>
: Input
/**
* Creates a new {@link ParserError} if the given input is not already a parser error.
*/
type CreateParserErrorIfRequired<Input, Message extends string> = Input extends ParserError<string>
? Input
: ParserError<Message>
/**
* Parser errors.
*/
export type ParserError<Message extends string> = { error: true } & Message
type GenericStringError = ParserError<'Received a generic string'>
export namespace Ast {
export type Node = FieldNode | StarNode | SpreadNode
export type FieldNode = {
type: 'field'
name: string
alias?: string
hint?: string
innerJoin?: true
castType?: string
jsonPath?: string
aggregateFunction?: Token.AggregateFunction
children?: Node[]
}
export type StarNode = {
type: 'star'
}
export type SpreadNode = {
type: 'spread'
target: FieldNode & { children: Node[] }
}
}
namespace Token {
export type Whitespace = ' ' | '\n' | '\t'
type LowerAlphabet =
| 'a'
| 'b'
| 'c'
| 'd'
| 'e'
| 'f'
| 'g'
| 'h'
| 'i'
| 'j'
| 'k'
| 'l'
| 'm'
| 'n'
| 'o'
| 'p'
| 'q'
| 'r'
| 's'
| 't'
| 'u'
| 'v'
| 'w'
| 'x'
| 'y'
| 'z'
type Alphabet = LowerAlphabet | Uppercase<LowerAlphabet>
type Digit = '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0'
export type Letter = Alphabet | Digit | '_'
export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max'
}

View File

@@ -0,0 +1,422 @@
import { GenericTable } from '../types'
import { ContainsNull, GenericRelationship, PostgreSQLTypes } from './types'
import { Ast, ParseQuery } from './parser'
import {
AggregateFunctions,
ExtractFirstProperty,
GenericSchema,
IsNonEmptyArray,
Prettify,
TablesAndViews,
TypeScriptTypes,
} from './types'
import {
CheckDuplicateEmbededReference,
GetFieldNodeResultName,
IsAny,
IsRelationNullable,
IsStringUnion,
JsonPathToType,
ResolveRelationship,
SelectQueryError,
} from './utils'
/**
* Main entry point for constructing the result type of a PostgREST query.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Query - The select query string literal to parse.
*/
export type GetResult<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName,
Relationships,
Query extends string
> = IsAny<Schema> extends true
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? ProcessNodesWithoutSchema<ParsedQuery>
: any
: ParsedQuery
: any
: Relationships extends null // For .rpc calls the passed relationships will be null in that case, the result will always be the function return type
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RPCCallNodes<ParsedQuery, RelationName extends string ? RelationName : 'rpc_call', Row>
: ParsedQuery
: Row
: ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? Relationships extends GenericRelationship[]
? ProcessNodes<Schema, Row, RelationName, Relationships, ParsedQuery>
: SelectQueryError<'Invalid Relationships cannot infer result type'>
: SelectQueryError<'Invalid RelationName cannot infer result type'>
: ParsedQuery
: never
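// Illustrative usage (a sketch with a hypothetical minimal schema, not part of the original
// postgrest-js source): GetResult infers the row shape produced by a select string.
type ExampleSchema = {
  Tables: {
    users: {
      Row: { id: number; name: string | null }
      Insert: { id?: number; name?: string | null }
      Update: { id?: number; name?: string | null }
      Relationships: []
    }
  }
  Views: {}
  Functions: {}
}
type ExampleSelectResult = GetResult<
  ExampleSchema,
  ExampleSchema['Tables']['users']['Row'],
  'users',
  [],
  'id, name'
>
// ExampleSelectResult ≈ { id: number; name: string | null }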
type ProcessSimpleFieldWithoutSchema<Field extends Ast.FieldNode> =
Field['aggregateFunction'] extends AggregateFunctions
? {
// An aggregate function will always override the column name: id.sum() will become sum,
// unless it has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes // We apply the detected cast as the result type
? TypeScriptTypes<Field['castType']>
: any
}
type ProcessFieldNodeWithoutSchema<Node extends Ast.FieldNode> = IsNonEmptyArray<
Node['children']
> extends true
? {
[K in GetFieldNodeResultName<Node>]: Node['children'] extends Ast.Node[]
? ProcessNodesWithoutSchema<Node['children']>[]
: ProcessSimpleFieldWithoutSchema<Node>
}
: ProcessSimpleFieldWithoutSchema<Node>
/**
* Processes a single Node without schema and returns the resulting TypeScript type.
*/
type ProcessNodeWithoutSchema<Node extends Ast.Node> = Node extends Ast.StarNode
? any
: Node extends Ast.SpreadNode
? Node['target']['children'] extends Ast.StarNode[]
? any
: Node['target']['children'] extends Ast.FieldNode[]
? {
[P in Node['target']['children'][number] as GetFieldNodeResultName<P>]: P['castType'] extends PostgreSQLTypes
? TypeScriptTypes<P['castType']>
: any
}
: any
: Node extends Ast.FieldNode
? ProcessFieldNodeWithoutSchema<Node>
: any
/**
* Processes nodes when Schema is any, providing basic type inference
*/
type ProcessNodesWithoutSchema<
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {}
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNodeWithoutSchema<FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodesWithoutSchema<RestNodes, Acc & FieldResult>
: FieldResult
: any
: any
: any
: Prettify<Acc>
/**
* Processes a single Node from a select chained after an rpc call
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current rpc function
* @param NodeType - The Node to process.
*/
export type ProcessRPCNode<
Row extends Record<string, unknown>,
RelationName extends string,
NodeType extends Ast.Node
> = NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessSimpleField<Row, RelationName, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'RPC Unsupported node type.'>
/**
* Processes a select call that can be chained after an rpc call
*/
export type RPCCallNodes<
Nodes extends Ast.Node[],
RelationName extends string,
Row extends Record<string, unknown>,
Acc extends Record<string, unknown> = {} // Acc is now an object
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessRPCNode<Row, RelationName, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? RPCCallNodes<RestNodes, RelationName, Row, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array in RPC call'>
: SelectQueryError<'Invalid first node in RPC call'>
: Prettify<Acc>
/**
* Recursively processes an array of Nodes and accumulates the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Nodes - An array of AST nodes to process.
* @param Acc - Accumulator for the constructed type.
*/
export type ProcessNodes<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {} // Acc is now an object
> = CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes> extends false
? Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNode<Schema, Row, RelationName, Relationships, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodes<Schema, Row, RelationName, Relationships, RestNodes, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array type in ProcessNodes'>
: SelectQueryError<'Invalid first node type in ProcessNodes'>
: Prettify<Acc>
: Prettify<CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes>>
/**
* Processes a single Node and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param NodeType - The Node to process.
*/
export type ProcessNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
NodeType extends Ast.Node
> =
// TODO: figure out why comparing the `type` property is necessary vs. `NodeType extends Ast.StarNode`
NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.SpreadNode['type'] // If the selection is a ...spread
? ProcessSpreadNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.SpreadNode>>
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessFieldNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'Unsupported node type.'>
/**
* Processes a FieldNode and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
type ProcessFieldNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field['children'] extends []
? {}
: IsNonEmptyArray<Field['children']> extends true // Has embedded resource?
? ProcessEmbeddedResource<Schema, Relationships, Field, RelationName>
: ProcessSimpleField<Row, RelationName, Field>
type ResolveJsonPathType<
Value,
Path extends string | undefined,
CastType extends PostgreSQLTypes
> = Path extends string
? JsonPathToType<Value, Path> extends never
? // Always fallback if JsonPathToType returns never
TypeScriptTypes<CastType>
: JsonPathToType<Value, Path> extends infer PathResult
? PathResult extends string
? // Use the result if it's a string as we know that even with the string accessor ->> it's a valid type
PathResult
: IsStringUnion<PathResult> extends true
? // Use the result if it's a union of strings
PathResult
: CastType extends 'json'
? // If the type is not a string, ensure it was accessed with json accessor ->
PathResult
: // Otherwise it means non-string value accessed with string accessor ->> use the TypeScriptTypes result
TypeScriptTypes<CastType>
: TypeScriptTypes<CastType>
: // No json path, use regular type casting
TypeScriptTypes<CastType>
/**
* Processes a simple field (without embedded resources).
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Field - The FieldNode to process.
*/
type ProcessSimpleField<
Row extends Record<string, unknown>,
RelationName extends string,
Field extends Ast.FieldNode
> = Field['name'] extends keyof Row | 'count'
? Field['aggregateFunction'] extends AggregateFunctions
? {
// An aggregate function will always override the column name: id.sum() will become sum,
// unless it has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? ResolveJsonPathType<Row[Field['name']], Field['jsonPath'], Field['castType']>
: Row[Field['name']]
}
: SelectQueryError<`column '${Field['name']}' does not exist on '${RelationName}'.`>
/**
* Processes an embedded resource (relation).
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
export type ProcessEmbeddedResource<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveRelationship<Schema, Relationships, Field, CurrentTableOrView> extends infer Resolved
? Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
}
? ProcessEmbeddedResourceResult<Schema, Resolved, Field, CurrentTableOrView>
: // Otherwise the Resolved is a SelectQueryError return it
{ [K in GetFieldNodeResultName<Field>]: Resolved }
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to resolve relationship.'> &
string
}
/**
* Helper type to process the result of an embedded resource.
*/
type ProcessEmbeddedResourceResult<
Schema extends GenericSchema,
Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
},
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema>
> = ProcessNodes<
Schema,
Resolved['referencedTable']['Row'],
Field['name'],
Resolved['referencedTable']['Relationships'],
Field['children'] extends undefined
? []
: Exclude<Field['children'], undefined> extends Ast.Node[]
? Exclude<Field['children'], undefined>
: []
> extends infer ProcessedChildren
? {
[K in GetFieldNodeResultName<Field>]: Resolved['direction'] extends 'forward'
? Field extends { innerJoin: true }
? Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren
: ProcessedChildren[]
: Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren | null
: ProcessedChildren[]
: // If the relation is a self-reference it'll always be considered a reverse relationship
Resolved['relation']['referencedRelation'] extends CurrentTableOrView
? // It can either be a reverse reference via a column inclusion (eg: parent_id(*)),
// in which case the result will be a single object
Resolved['relation']['match'] extends 'col'
? IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? ProcessedChildren | null
: ProcessedChildren
: // Or it can be a reference via the referenced relation (eg: collections(*)),
// in which case the result will be an array of all matching rows (all collections whose parent_id is the current id)
ProcessedChildren[]
: // Otherwise, if it's a non-self-referencing reverse relationship, it's a single object
IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? Field extends { innerJoin: true }
? ProcessedChildren
: ProcessedChildren | null
: ProcessedChildren
}
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to process embedded resource nodes.'> &
string
}
/**
* Processes a SpreadNode by processing its target node.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Spread - The SpreadNode to process.
*/
type ProcessSpreadNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Spread extends Ast.SpreadNode
> = ProcessNode<Schema, Row, RelationName, Relationships, Spread['target']> extends infer Result
? Result extends SelectQueryError<infer E>
? SelectQueryError<E>
: ExtractFirstProperty<Result> extends unknown[]
? {
[K in Spread['target']['name']]: SelectQueryError<`"${RelationName}" and "${Spread['target']['name']}" do not form a many-to-one or one-to-one relationship spread not possible`>
}
: ProcessSpreadNodeResult<Result>
: never
/**
* Helper type to process the result of a spread node.
*/
type ProcessSpreadNodeResult<Result> = Result extends Record<
string,
SelectQueryError<string> | null
>
? Result
: ExtractFirstProperty<Result> extends infer SpreadedObject
? ContainsNull<SpreadedObject> extends true
? Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] | null }, null>
: Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] }, null>
: SelectQueryError<'An error occurred spreading the object'>

View File

@@ -0,0 +1,115 @@
import type { GenericRelationship, GenericSchema, GenericTable, Prettify } from '../types'
export type { GenericRelationship, GenericSchema, GenericTable, Prettify }
export type AggregateWithoutColumnFunctions = 'count'
export type AggregateWithColumnFunctions =
| 'sum'
| 'avg'
| 'min'
| 'max'
| AggregateWithoutColumnFunctions
export type AggregateFunctions = AggregateWithColumnFunctions
export type Json =
| string
| number
| boolean
| null
| {
[key: string]: Json | undefined
}
| Json[]
type PostgresSQLNumberTypes = 'int2' | 'int4' | 'int8' | 'float4' | 'float8' | 'numeric'
type PostgresSQLStringTypes =
| 'bytea'
| 'bpchar'
| 'varchar'
| 'date'
| 'text'
| 'citext'
| 'time'
| 'timetz'
| 'timestamp'
| 'timestamptz'
| 'uuid'
| 'vector'
type SingleValuePostgreSQLTypes =
| PostgresSQLNumberTypes
| PostgresSQLStringTypes
| 'bool'
| 'json'
| 'jsonb'
| 'void'
| 'record'
| string
type ArrayPostgreSQLTypes = `_${SingleValuePostgreSQLTypes}`
type TypeScriptSingleValueTypes<T extends SingleValuePostgreSQLTypes> = T extends 'bool'
? boolean
: T extends PostgresSQLNumberTypes
? number
: T extends PostgresSQLStringTypes
? string
: T extends 'json' | 'jsonb'
? Json
: T extends 'void'
? undefined
: T extends 'record'
? Record<string, unknown>
: unknown
type StripUnderscore<T extends string> = T extends `_${infer U}` ? U : T
// Represents all possible PostgreSQL types, including array types; 'string' is kept in the union to allow for custom types
export type PostgreSQLTypes = SingleValuePostgreSQLTypes | ArrayPostgreSQLTypes
// Helper type to convert PostgreSQL types to their TypeScript equivalents
export type TypeScriptTypes<T extends PostgreSQLTypes> = T extends ArrayPostgreSQLTypes
? TypeScriptSingleValueTypes<StripUnderscore<Extract<T, SingleValuePostgreSQLTypes>>>[]
: TypeScriptSingleValueTypes<T>
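// Illustrative usage (a sketch, not from the original source): scalar types map to their
// TypeScript equivalents and `_`-prefixed array types map to arrays of those.
type ExampleNumberType = TypeScriptTypes<'int8'> // ≈ number
type ExampleBooleanType = TypeScriptTypes<'bool'> // ≈ boolean
type ExampleArrayType = TypeScriptTypes<'_text'> // ≈ string[]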
// Utility types for working with unions
export type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (
k: infer I
) => void
? I
: never
export type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R
? R
: never
export type Push<T extends any[], V> = [...T, V]
// Converts a union type to a tuple type
export type UnionToTuple<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = N extends true
? []
: Push<UnionToTuple<Exclude<T, L>>, L>
export type UnionToArray<T> = UnionToTuple<T>
// Extracts the type of the first property in an object type
export type ExtractFirstProperty<T> = T extends { [K in keyof T]: infer U } ? U : never
// Type predicates
export type ContainsNull<T> = null extends T ? true : false
export type IsNonEmptyArray<T> = Exclude<T, undefined> extends readonly [unknown, ...unknown[]]
? true
: false
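// Illustrative usage (a sketch, not from the original source) of the predicates above.
type ExampleHasNull = ContainsNull<string | null> // ≈ true
type ExampleNoNull = ContainsNull<string> // ≈ false
type ExampleNonEmpty = IsNonEmptyArray<[1, 2]> // ≈ true
type ExampleEmpty = IsNonEmptyArray<[]> // ≈ false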
// Types for working with database schemas
export type TablesAndViews<Schema extends GenericSchema> = Schema['Tables'] &
Exclude<Schema['Views'], ''>
export type GetTableRelationships<
Schema extends GenericSchema,
Tname extends string
> = TablesAndViews<Schema>[Tname] extends { Relationships: infer R } ? R : false

View File

@@ -0,0 +1,580 @@
import { Ast } from './parser'
import {
AggregateFunctions,
ContainsNull,
GenericRelationship,
GenericSchema,
GenericTable,
IsNonEmptyArray,
TablesAndViews,
UnionToArray,
} from './types'
export type IsAny<T> = 0 extends 1 & T ? true : false
export type SelectQueryError<Message extends string> = { error: true } & Message
/*
** Because of pg-meta types generation there are some cases where the same relationship can be duplicated
** if the relation spans schemas and views. This ensures that we dedupe those relations and treat them
** as postgrest would.
** This is no longer the case and has been patched here: https://github.com/supabase/postgres-meta/pull/809
** But we still need this for retro-compatibility with older generated types.
** TODO: Remove this in the next major version
*/
export type DeduplicateRelationships<T extends readonly unknown[]> = T extends readonly [
infer First,
...infer Rest
]
? First extends Rest[number]
? DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>
: [First, ...DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>]
: T
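// Illustrative usage (a sketch with plain string literals instead of relationship objects,
// not from the original source): duplicates are removed, keeping the last occurrence.
type ExampleDeduped = DeduplicateRelationships<['a', 'b', 'a']>
// ExampleDeduped ≈ ['b', 'a']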
export type GetFieldNodeResultName<Field extends Ast.FieldNode> = Field['alias'] extends string
? Field['alias']
: Field['aggregateFunction'] extends AggregateFunctions
? Field['aggregateFunction']
: Field['name']
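// Illustrative usage (a sketch, not from the original source): an alias wins over everything,
// an aggregate function wins over the column name, otherwise the name itself is used.
type ExampleAliasedName = GetFieldNodeResultName<{ type: 'field'; name: 'id'; alias: 'identifier' }>
// ExampleAliasedName ≈ 'identifier'
type ExampleAggregateName = GetFieldNodeResultName<{ type: 'field'; name: 'amount'; aggregateFunction: 'sum' }>
// ExampleAggregateName ≈ 'sum'
type ExamplePlainName = GetFieldNodeResultName<{ type: 'field'; name: 'amount' }>
// ExamplePlainName ≈ 'amount'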
type FilterRelationNodes<Nodes extends Ast.Node[]> = UnionToArray<
{
[K in keyof Nodes]: Nodes[K] extends Ast.SpreadNode
? Nodes[K]['target']
: Nodes[K] extends Ast.FieldNode
? IsNonEmptyArray<Nodes[K]['children']> extends true
? Nodes[K]
: never
: never
}[number]
>
type ResolveRelationships<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.FieldNode[]
> = UnionToArray<{
[K in keyof Nodes]: Nodes[K] extends Ast.FieldNode
? ResolveRelationship<Schema, Relationships, Nodes[K], RelationName> extends infer Relation
? Relation extends {
relation: {
referencedRelation: string
foreignKeyName: string
match: string
}
from: string
}
? {
referencedTable: Relation['relation']['referencedRelation']
fkName: Relation['relation']['foreignKeyName']
from: Relation['from']
match: Relation['relation']['match']
fieldName: GetFieldNodeResultName<Nodes[K]>
}
: Relation
: never
: never
}>[0]
/**
* Checks if a relation is implicitly referenced twice, requiring disambiguation
*/
type IsDoubleReference<T, U> = T extends {
referencedTable: infer RT
fieldName: infer FN
match: infer M
}
? M extends 'col' | 'refrel'
? U extends { referencedTable: RT; fieldName: FN; match: M }
? true
: false
: false
: false
/**
* Compares one element with all other elements in the array to find duplicates
*/
type CheckDuplicates<Arr extends any[], Current> = Arr extends [infer Head, ...infer Tail]
? IsDoubleReference<Current, Head> extends true
? Head | CheckDuplicates<Tail, Current> // Return the Head if duplicate
: CheckDuplicates<Tail, Current> // Otherwise, continue checking
: never
/**
* Iterates over the elements of the array to find duplicates
*/
type FindDuplicatesWithinDeduplicated<Arr extends any[]> = Arr extends [infer Head, ...infer Tail]
? CheckDuplicates<Tail, Head> | FindDuplicatesWithinDeduplicated<Tail>
: never
type FindDuplicates<Arr extends any[]> = FindDuplicatesWithinDeduplicated<
DeduplicateRelationships<Arr>
>
export type CheckDuplicateEmbededReference<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[]
> = FilterRelationNodes<Nodes> extends infer RelationsNodes
? RelationsNodes extends Ast.FieldNode[]
? ResolveRelationships<
Schema,
RelationName,
Relationships,
RelationsNodes
> extends infer ResolvedRels
? ResolvedRels extends unknown[]
? FindDuplicates<ResolvedRels> extends infer Duplicates
? Duplicates extends never
? false
: Duplicates extends { fieldName: infer FieldName }
? FieldName extends string
? {
[K in FieldName]: SelectQueryError<`table "${RelationName}" specified more than once use hinting for desambiguation`>
}
: false
: false
: false
: false
: false
: false
: false
/**
* Returns a boolean representing whether there is a foreign key referencing
* a given relation.
*/
type HasFKeyToFRel<FRelName, Relationships> = Relationships extends [infer R]
? R extends { referencedRelation: FRelName }
? true
: false
: Relationships extends [infer R, ...infer Rest]
? HasFKeyToFRel<FRelName, [R]> extends true
? true
: HasFKeyToFRel<FRelName, Rest>
: false
/**
* Checks if there is more than one relation to a given foreign relation name in the Relationships.
*/
type HasMultipleFKeysToFRelDeduplicated<FRelName, Relationships> = Relationships extends [
infer R,
...infer Rest
]
? R extends { referencedRelation: FRelName }
? HasFKeyToFRel<FRelName, Rest> extends true
? true
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: false
type HasMultipleFKeysToFRel<
FRelName,
Relationships extends unknown[]
> = HasMultipleFKeysToFRelDeduplicated<FRelName, DeduplicateRelationships<Relationships>>
type CheckRelationshipError<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FoundRelation
> = FoundRelation extends SelectQueryError<string>
? FoundRelation
: // If the relation is a reverse relation with no hint (matching by name)
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'reverse'
}
? RelatedRelationName extends string
? // We check if there is possible confusion with other relations with this table
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
? // If there is, postgrest will fail at runtime and require disambiguation via hinting
SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: FoundRelation
: never
: // Same check for forward relationships, but we must gather the relationships from the found relation
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'forward'
from: infer From
}
? RelatedRelationName extends string
? From extends keyof TablesAndViews<Schema> & string
? HasMultipleFKeysToFRel<
RelatedRelationName,
TablesAndViews<Schema>[From]['Relationships']
> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${From}' and '${RelatedRelationName}' you need to hint the column with ${From}!<columnName> ?`>
: FoundRelation
: never
: never
: FoundRelation
/**
* Resolves relationships for embedded resources and retrieves the referenced Table
*/
export type ResolveRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveReverseRelationship<
Schema,
Relationships,
Field,
CurrentTableOrView
> extends infer ReverseRelationship
? ReverseRelationship extends false
? CheckRelationshipError<
Schema,
Relationships,
CurrentTableOrView,
ResolveForwardRelationship<Schema, Field, CurrentTableOrView>
>
: CheckRelationshipError<Schema, Relationships, CurrentTableOrView, ReverseRelationship>
: never
/**
* Resolves reverse relationships (from children to parent)
*/
type ResolveReverseRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<Schema, Relationships, Field> extends infer FoundRelation
? FoundRelation extends never
? false
: FoundRelation extends { referencedRelation: infer RelatedRelationName }
? RelatedRelationName extends string
? RelatedRelationName extends keyof TablesAndViews<Schema>
? // If the relation was found via hinting we just return it without any more checks
FoundRelation extends { hint: string }
? {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
: // If the relation was found via implicit relation naming, we must ensure there are no conflicting matches
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
: SelectQueryError<`Relation '${RelatedRelationName}' not found in schema.`>
: false
: false
: false
export type FindMatchingTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Tables']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingTableRelationships<Schema, Rest, value>
: FindMatchingTableRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Views']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingViewRelationships<Schema, Rest, value>
: FindMatchingViewRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingHintTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: false
: false
: false
export type FindMatchingHintViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: false
: false
: false
type IsColumnsNullable<
Table extends Pick<GenericTable, 'Row'>,
Columns extends (keyof Table['Row'])[]
> = Columns extends [infer Column, ...infer Rest]
? Column extends keyof Table['Row']
? ContainsNull<Table['Row'][Column]> extends true
? true
: IsColumnsNullable<Table, Rest extends (keyof Table['Row'])[] ? Rest : []>
: false
: false
// Check whether or not a 1-1 relation is nullable by checking the types of its columns
export type IsRelationNullable<
Table extends GenericTable,
Relation extends GenericRelationship
> = IsColumnsNullable<Table, Relation['columns']>
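// Illustrative usage (a sketch with a hypothetical table and foreign key, not from the
// original source): the relation is nullable because its `author_id` column is nullable.
type ExamplePostsTable = {
  Row: { id: number; author_id: number | null }
  Insert: { id?: number; author_id?: number | null }
  Update: { id?: number; author_id?: number | null }
  Relationships: [
    {
      foreignKeyName: 'posts_author_id_fkey'
      columns: ['author_id']
      isOneToOne: false
      referencedRelation: 'users'
      referencedColumns: ['id']
    }
  ]
}
type ExampleNullableRelation = IsRelationNullable<
  ExamplePostsTable,
  ExamplePostsTable['Relationships'][0]
>
// ExampleNullableRelation ≈ true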
type TableForwardRelationships<
Schema extends GenericSchema,
TName
> = TName extends keyof TablesAndViews<Schema>
? UnionToArray<
RecursivelyFindRelationships<Schema, TName, keyof TablesAndViews<Schema>>
> extends infer R
? R extends (GenericRelationship & { from: keyof TablesAndViews<Schema> })[]
? R
: []
: []
: []
type RecursivelyFindRelationships<
Schema extends GenericSchema,
TName,
Keys extends keyof TablesAndViews<Schema>
> = Keys extends infer K
? K extends keyof TablesAndViews<Schema>
? FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K> extends never
? RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
:
| FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K>
| RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
: false
: false
type FilterRelationships<R, TName, From> = R extends readonly (infer Rel)[]
? Rel extends { referencedRelation: TName }
? Rel & { from: From }
: never
: never
export type ResolveForwardRelationship<
Schema extends GenericSchema,
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<
Schema,
TablesAndViews<Schema>[Field['name']]['Relationships'],
Ast.FieldNode & { name: CurrentTableOrView; hint: Field['hint'] }
> extends infer FoundByName
? FoundByName extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[Field['name']]
relation: FoundByName
direction: 'forward'
from: Field['name']
type: 'found-by-name'
}
: FindFieldMatchingRelationships<
Schema,
TableForwardRelationships<Schema, CurrentTableOrView>,
Field
> extends infer FoundByMatch
? FoundByMatch extends GenericRelationship & {
from: keyof TablesAndViews<Schema>
}
? {
referencedTable: TablesAndViews<Schema>[FoundByMatch['from']]
relation: FoundByMatch
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-match'
}
: FindJoinTableRelationship<
Schema,
CurrentTableOrView,
Field['name']
> extends infer FoundByJoinTable
? FoundByJoinTable extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[FoundByJoinTable['referencedRelation']]
relation: FoundByJoinTable & { match: 'refrel' }
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-join-table'
}
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
/**
* Given a CurrentTableOrView, finds a relationship to FieldName through a join table.
* For example, if products and categories are linked via the product_categories join table:
*
* @example
* Given:
* - CurrentTableView = 'products'
* - FieldName = "categories"
*
* It should return this relationship from product_categories:
* {
* foreignKeyName: "product_categories_category_id_fkey",
* columns: ["category_id"],
* isOneToOne: false,
* referencedRelation: "categories",
* referencedColumns: ["id"]
* }
*/
type ResolveJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = {
[TableName in keyof TablesAndViews<Schema>]: DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer Rel)[]
? Rel extends { referencedRelation: CurrentTableOrView }
? DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer OtherRel)[]
? OtherRel extends { referencedRelation: FieldName }
? OtherRel
: never
: never
: never
: never
}[keyof TablesAndViews<Schema>]
export type FindJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = ResolveJoinTableRelationship<Schema, CurrentTableOrView, FieldName> extends infer Result
? [Result] extends [never]
? false
: Result
: never
/**
* Finds a matching relationship based on the FieldNode's name and optional hint.
*/
export type FindFieldMatchingRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field extends { hint: string }
? FindMatchingHintTableRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintTableRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-table-via-hint'
hint: Field['hint']
}
: FindMatchingHintViewRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintViewRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-view-via-hint'
hint: Field['hint']
}
: SelectQueryError<'Failed to find matching relation via hint'>
: FindMatchingTableRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingTableRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-table-via-name'
name: Field['name']
}
: FindMatchingViewRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingViewRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-view-via-name'
name: Field['name']
}
: SelectQueryError<'Failed to find matching relation via name'>
export type JsonPathToAccessor<Path extends string> = Path extends `${infer P1}->${infer P2}`
? P2 extends `>${infer Rest}` // Handle ->> operator
? JsonPathToAccessor<`${P1}.${Rest}`>
: P2 extends string // Handle -> operator
? JsonPathToAccessor<`${P1}.${P2}`>
: Path
: Path extends `>${infer Rest}` // Clean up any remaining > characters
? JsonPathToAccessor<Rest>
: Path extends `${infer P1}::${infer _}` // Handle type casting
? JsonPathToAccessor<P1>
: Path extends `${infer P1}${')' | ','}${infer _}` // Handle closing parenthesis and comma
? P1
: Path
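// Illustrative usage (a sketch, not from the original source): `->` and `->>` segments are
// rewritten into a dotted path.
type ExampleAccessorPath = JsonPathToAccessor<'settings->appearance->>theme'>
// ExampleAccessorPath ≈ 'settings.appearance.theme'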
export type JsonPathToType<T, Path extends string> = Path extends ''
? T
: ContainsNull<T> extends true
? JsonPathToType<Exclude<T, null>, Path>
: Path extends `${infer Key}.${infer Rest}`
? Key extends keyof T
? JsonPathToType<T[Key], Rest>
: never
: Path extends keyof T
? T[Path]
: never
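// Illustrative usage (a sketch, not from the original source): a dotted path drills into the
// object type, stepping over a possible null at each level.
type ExampleJsonValue = JsonPathToType<{ settings: { theme: string } | null }, 'settings.theme'>
// ExampleJsonValue ≈ string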
export type IsStringUnion<T> = string extends T
? false
: T extends string
? [T] extends [never]
? false
: true
: false

188
node_modules/@supabase/postgrest-js/src/types.ts generated vendored Normal file
View File

@@ -0,0 +1,188 @@
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
import { SelectQueryError } from './select-query-parser/utils'
export type Fetch = typeof fetch
/**
* Response format
*
* {@link https://github.com/supabase/supabase-js/issues/32}
*/
interface PostgrestResponseBase {
status: number
statusText: string
}
export interface PostgrestResponseSuccess<T> extends PostgrestResponseBase {
error: null
data: T
count: number | null
}
export interface PostgrestResponseFailure extends PostgrestResponseBase {
error: PostgrestError
data: null
count: null
}
// TODO: in v3:
// - remove PostgrestResponse and PostgrestMaybeSingleResponse
// - rename PostgrestSingleResponse to PostgrestResponse
export type PostgrestSingleResponse<T> = PostgrestResponseSuccess<T> | PostgrestResponseFailure
export type PostgrestMaybeSingleResponse<T> = PostgrestSingleResponse<T | null>
export type PostgrestResponse<T> = PostgrestSingleResponse<T[]>
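// Illustrative usage (a sketch, not from the original source): checking `error` narrows the
// union, so `data` is non-null on the success branch and `error` is typed on the failure branch.
declare const exampleResponse: PostgrestSingleResponse<{ id: number }>
if (exampleResponse.error === null) {
  // success branch: data is { id: number }, count is number | null
  console.log(exampleResponse.data.id, exampleResponse.count)
} else {
  // failure branch: data is null, error is a PostgrestError
  console.log(exampleResponse.error.message)
}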
export type GenericRelationship = {
foreignKeyName: string
columns: string[]
isOneToOne?: boolean
referencedRelation: string
referencedColumns: string[]
}
export type GenericTable = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericUpdatableView = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericNonUpdatableView = {
Row: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericView = GenericUpdatableView | GenericNonUpdatableView
export type GenericFunction = {
Args: Record<string, unknown>
Returns: unknown
}
export type GenericSchema = {
Tables: Record<string, GenericTable>
Views: Record<string, GenericView>
Functions: Record<string, GenericFunction>
}
// https://twitter.com/mattpocockuk/status/1622730173446557697
export type Prettify<T> = { [K in keyof T]: T[K] } & {}
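// Illustrative usage (a sketch, not from the original source): Prettify flattens intersections
// into a single object type in editor hovers.
type ExamplePrettified = Prettify<{ id: number } & { name: string }>
// ExamplePrettified ≈ { id: number; name: string }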
// https://github.com/sindresorhus/type-fest
export type SimplifyDeep<Type, ExcludeType = never> = ConditionalSimplifyDeep<
Type,
ExcludeType | NonRecursiveType | Set<unknown> | Map<unknown, unknown>,
object
>
type ConditionalSimplifyDeep<
Type,
ExcludeType = never,
IncludeType = unknown
> = Type extends ExcludeType
? Type
: Type extends IncludeType
? { [TypeKey in keyof Type]: ConditionalSimplifyDeep<Type[TypeKey], ExcludeType, IncludeType> }
: Type
type NonRecursiveType = BuiltIns | Function | (new (...arguments_: any[]) => unknown)
type BuiltIns = Primitive | void | Date | RegExp
type Primitive = null | undefined | string | number | boolean | symbol | bigint
export type IsValidResultOverride<Result, NewResult, ErrorResult, ErrorNewResult> =
Result extends any[]
? NewResult extends any[]
? // Both are arrays - valid
true
: ErrorResult
: NewResult extends any[]
? ErrorNewResult
: // Neither are arrays - valid
true
/**
* Utility type to check if array types match between Result and NewResult.
* Returns either the valid NewResult type or an error message type.
*/
export type CheckMatchingArrayTypes<Result, NewResult> =
// If the result is a QueryError we allow the user to override anyway
Result extends SelectQueryError<string>
? NewResult
: IsValidResultOverride<
Result,
NewResult,
{
Error: 'Type mismatch: Cannot cast array result to a single object. Use .overrideTypes<Array<YourType>> or .returns<Array<YourType>> (deprecated) for array results or .single() to convert the result to a single object'
},
{
Error: 'Type mismatch: Cannot cast single object to array type. Remove Array wrapper from return type or make sure you are not using .single() up in the calling chain'
}
> extends infer ValidationResult
? ValidationResult extends true
? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? NewResult | null
: NewResult
: // contains the error
ValidationResult
: never
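// Illustrative usage (a sketch, not from the original source): array-to-array overrides pass
// through, while casting an array result to a single object resolves to the error type above.
type ExampleValidOverride = CheckMatchingArrayTypes<{ id: number }[], { id: string }[]>
// ExampleValidOverride ≈ { id: string }[]
type ExampleInvalidOverride = CheckMatchingArrayTypes<{ id: number }[], { id: string }>
// ExampleInvalidOverride ≈ { Error: 'Type mismatch: Cannot cast array result to a single object. ...' }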
type Simplify<T> = T extends object ? { [K in keyof T]: T[K] } : T
// Extract only explicit (non-index-signature) keys.
type ExplicitKeys<T> = {
[K in keyof T]: string extends K ? never : K
}[keyof T]
type MergeExplicit<New, Row> = {
// We merge all the explicit keys which allows merge and override of types like
// { [key: string]: unknown } and { someSpecificKey: boolean }
[K in ExplicitKeys<New> | ExplicitKeys<Row>]: K extends keyof New
? K extends keyof Row
? Row[K] extends SelectQueryError<string>
? New[K]
: // Check if the override is on an embedded relation (array)
New[K] extends any[]
? Row[K] extends any[]
? Array<Simplify<MergeDeep<NonNullable<New[K][number]>, NonNullable<Row[K][number]>>>>
: New[K]
: // Check if both properties are objects omitting a potential null union
IsPlainObject<NonNullable<New[K]>> extends true
? IsPlainObject<NonNullable<Row[K]>> extends true
? // If they are, use the new override as source of truth for the optionality
ContainsNull<New[K]> extends true
? // If the override wants to preserve optionality
Simplify<MergeDeep<NonNullable<New[K]>, NonNullable<Row[K]>>> | null
: // If the override wants to enforce non-null result
Simplify<MergeDeep<New[K], NonNullable<Row[K]>>>
: New[K] // Override with New type if Row isn't an object
: New[K] // Override primitives with New type
: New[K] // Add new properties from New
: K extends keyof Row
? Row[K] // Keep existing properties not in New
: never
}
type MergeDeep<New, Row> = Simplify<
MergeExplicit<New, Row> &
// Intersection here is to restore dynamic keys into the merging result
// eg:
// {[key: number]: string}
// or Record<string, number | null>
(string extends keyof Row ? { [K: string]: Row[string] } : {})
>
// Helper to check if a type is a plain object (not an array)
type IsPlainObject<T> = T extends any[] ? false : T extends object ? true : false
// Merge the new result with the original (Result) when merge option is true.
// If NewResult is an array, merge each element.
export type MergePartialResult<NewResult, Result, Options> = Options extends { merge: true }
? Result extends any[]
? NewResult extends any[]
? Array<Simplify<MergeDeep<NewResult[number], Result[number]>>>
: never
: Simplify<MergeDeep<NewResult, Result>>
: NewResult
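// Illustrative usage (a sketch, not from the original source): with `merge: true` the override
// is deep-merged into the original row type instead of replacing it.
type ExampleMergedResult = MergePartialResult<
  { settings: { theme: 'dark' | 'light' } },
  { id: number; settings: { theme: string } | null },
  { merge: true }
>
// ExampleMergedResult ≈ { id: number; settings: { theme: 'dark' | 'light' } }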

1
node_modules/@supabase/postgrest-js/src/version.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
export const version = '1.19.4'