main repo

This commit is contained in:
Basilosaurusrex
2025-11-24 18:09:40 +01:00
parent b636ee5e70
commit f027651f9b
34146 changed files with 4436636 additions and 0 deletions

node_modules/@supabase/postgrest-js/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Supabase
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/@supabase/postgrest-js/README.md generated vendored Normal file

@@ -0,0 +1,54 @@
# `postgrest-js`
[![Build](https://github.com/supabase/postgrest-js/workflows/CI/badge.svg)](https://github.com/supabase/postgrest-js/actions?query=branch%3Amaster)
[![Package](https://img.shields.io/npm/v/@supabase/postgrest-js)](https://www.npmjs.com/package/@supabase/postgrest-js)
[![License: MIT](https://img.shields.io/npm/l/@supabase/postgrest-js)](#license)
Isomorphic JavaScript client for [PostgREST](https://postgrest.org). The goal of this library is to provide an "ORM-like" RESTful interface.
Full documentation can be found [here](https://supabase.github.io/postgrest-js/v2).
### Quick start
Install
```bash
npm install @supabase/postgrest-js
```
Usage
```js
import { PostgrestClient } from '@supabase/postgrest-js'
const REST_URL = 'http://localhost:3000'
const postgrest = new PostgrestClient(REST_URL)
```
- select(): https://supabase.com/docs/reference/javascript/select
- insert(): https://supabase.com/docs/reference/javascript/insert
- update(): https://supabase.com/docs/reference/javascript/update
- delete(): https://supabase.com/docs/reference/javascript/delete
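These methods can be chained off `.from()`; a minimal sketch (assuming a hypothetical `users` table with `name` and `status` columns):
```js
// Read rows with server-side filtering and limiting
const { data, error } = await postgrest
  .from('users')
  .select('id, name')
  .eq('status', 'active')
  .limit(10)

// Insert a single row
await postgrest.from('users').insert({ name: 'Ada' })
```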
#### Custom `fetch` implementation
`postgrest-js` uses the [`cross-fetch`](https://www.npmjs.com/package/cross-fetch) library to make HTTP requests, but an alternative `fetch` implementation can be provided as an option. This is most useful in environments where `cross-fetch` is not compatible, for instance Cloudflare Workers:
```js
import { PostgrestClient } from '@supabase/postgrest-js'
const REST_URL = 'http://localhost:3000'
const postgrest = new PostgrestClient(REST_URL, {
fetch: (...args) => fetch(...args),
})
```
## License
This repo is licensed under the MIT License.
## Sponsors
We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist, we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone.
[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase)


@@ -0,0 +1,61 @@
import type { Fetch, PostgrestSingleResponse, PostgrestResponseSuccess, CheckMatchingArrayTypes, MergePartialResult, IsValidResultOverride } from './types';
import { ContainsNull } from './select-query-parser/types';
export default abstract class PostgrestBuilder<Result, ThrowOnError extends boolean = false> implements PromiseLike<ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>> {
protected method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE';
protected url: URL;
protected headers: Record<string, string>;
protected schema?: string;
protected body?: unknown;
protected shouldThrowOnError: boolean;
protected signal?: AbortSignal;
protected fetch: Fetch;
protected isMaybeSingle: boolean;
constructor(builder: PostgrestBuilder<Result>);
/**
* If there's an error with the query, throwOnError will reject the promise by
* throwing the error instead of returning it as part of a successful response.
*
* {@link https://github.com/supabase/supabase-js/issues/92}
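*
* @example
* ```typescript
* // Illustrative sketch only: the promise rejects with a PostgrestError instead of
* // resolving with `error` set on the response.
* const { data } = await supabase.from('users').select().throwOnError()
* ```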
*/
throwOnError(): this & PostgrestBuilder<Result, true>;
/**
* Set an HTTP header for the request.
*/
setHeader(name: string, value: string): this;
then<TResult1 = ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>, TResult2 = never>(onfulfilled?: ((value: ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null): PromiseLike<TResult1 | TResult2>;
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestBuilder<CheckMatchingArrayTypes<Result, NewResult>, ThrowOnError>;
/**
* Override the type of the returned `data` field in the response.
*
* @typeParam NewResult - The new type to cast the response data to
* @typeParam Options - Optional type configuration (defaults to { merge: true })
* @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
* @example
* ```typescript
* // Merge with existing types (default behavior)
* const query = supabase
* .from('users')
* .select()
* .overrideTypes<{ custom_field: string }>()
*
* // Replace existing types completely
* const replaceQuery = supabase
* .from('users')
* .select()
* .overrideTypes<{ id: number; name: string }, { merge: false }>()
* ```
* @returns A PostgrestBuilder instance with the new type
*/
overrideTypes<NewResult, Options extends {
merge?: boolean;
} = {
merge: true;
}>(): PostgrestBuilder<IsValidResultOverride<Result, NewResult, false, false> extends true ? ContainsNull<Result> extends true ? MergePartialResult<NewResult, NonNullable<Result>, Options> | null : MergePartialResult<NewResult, Result, Options> : CheckMatchingArrayTypes<Result, NewResult>, ThrowOnError>;
}
//# sourceMappingURL=PostgrestBuilder.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestBuilder.d.ts","sourceRoot":"","sources":["../../src/PostgrestBuilder.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,KAAK,EACL,uBAAuB,EACvB,wBAAwB,EACxB,uBAAuB,EACvB,kBAAkB,EAClB,qBAAqB,EACtB,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAA;AAE1D,MAAM,CAAC,OAAO,CAAC,QAAQ,OAAO,gBAAgB,CAAC,MAAM,EAAE,YAAY,SAAS,OAAO,GAAG,KAAK,CACzF,YACE,WAAW,CACT,YAAY,SAAS,IAAI,GAAG,wBAAwB,CAAC,MAAM,CAAC,GAAG,uBAAuB,CAAC,MAAM,CAAC,CAC/F;IAEH,SAAS,CAAC,MAAM,EAAE,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,QAAQ,CAAA;IAC9D,SAAS,CAAC,GAAG,EAAE,GAAG,CAAA;IAClB,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACzC,SAAS,CAAC,MAAM,CAAC,EAAE,MAAM,CAAA;IACzB,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAA;IACxB,SAAS,CAAC,kBAAkB,UAAQ;IACpC,SAAS,CAAC,MAAM,CAAC,EAAE,WAAW,CAAA;IAC9B,SAAS,CAAC,KAAK,EAAE,KAAK,CAAA;IACtB,SAAS,CAAC,aAAa,EAAE,OAAO,CAAA;gBAEpB,OAAO,EAAE,gBAAgB,CAAC,MAAM,CAAC;IAmB7C;;;;;OAKG;IACH,YAAY,IAAI,IAAI,GAAG,gBAAgB,CAAC,MAAM,EAAE,IAAI,CAAC;IAKrD;;OAEG;IACH,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAM5C,IAAI,CACF,QAAQ,GAAG,YAAY,SAAS,IAAI,GAChC,wBAAwB,CAAC,MAAM,CAAC,GAChC,uBAAuB,CAAC,MAAM,CAAC,EACnC,QAAQ,GAAG,KAAK,EAEhB,WAAW,CAAC,EACR,CAAC,CACC,KAAK,EAAE,YAAY,SAAS,IAAI,GAC5B,wBAAwB,CAAC,MAAM,CAAC,GAChC,uBAAuB,CAAC,MAAM,CAAC,KAChC,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAC,GACtC,SAAS,GACT,IAAI,EACR,UAAU,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,GAClF,WAAW,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAwInC;;;;;OAKG;IACH,OAAO,CAAC,SAAS,KAAK,gBAAgB,CAAC,uBAAuB,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,YAAY,CAAC;IAQhG;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,aAAa,CACX,SAAS,EACT,OAAO,SAAS;QAAE,KAAK,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG;QAAE,KAAK,EAAE,IAAI,CAAA;KAAE,KAClD,gBAAgB,CACnB,qBAAqB,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,SAAS,IAAI,GAE/D,YAAY,CAAC,MAAM,CAAC,SAAS,IAAI,GAC/B,kBAAkB,CAAC,SAAS,EAAE,WAAW,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC,GAAG,IAAI,GAClE,kBAAkB,CAAC,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,GAChD,uBAAuB,CAAC,MAAM,EAAE,SAAS,CAAC,EAC9C,YAAY,CACb;CAWF"}


@@ -0,0 +1,221 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// @ts-ignore
const node_fetch_1 = __importDefault(require("@supabase/node-fetch"));
const PostgrestError_1 = __importDefault(require("./PostgrestError"));
class PostgrestBuilder {
constructor(builder) {
this.shouldThrowOnError = false;
this.method = builder.method;
this.url = builder.url;
this.headers = builder.headers;
this.schema = builder.schema;
this.body = builder.body;
this.shouldThrowOnError = builder.shouldThrowOnError;
this.signal = builder.signal;
this.isMaybeSingle = builder.isMaybeSingle;
if (builder.fetch) {
this.fetch = builder.fetch;
}
else if (typeof fetch === 'undefined') {
this.fetch = node_fetch_1.default;
}
else {
this.fetch = fetch;
}
}
/**
* If there's an error with the query, throwOnError will reject the promise by
* throwing the error instead of returning it as part of a successful response.
*
* {@link https://github.com/supabase/supabase-js/issues/92}
*/
throwOnError() {
this.shouldThrowOnError = true;
return this;
}
/**
* Set an HTTP header for the request.
*/
setHeader(name, value) {
this.headers = Object.assign({}, this.headers);
this.headers[name] = value;
return this;
}
then(onfulfilled, onrejected) {
// https://postgrest.org/en/stable/api.html#switching-schemas
if (this.schema === undefined) {
// skip
}
else if (['GET', 'HEAD'].includes(this.method)) {
this.headers['Accept-Profile'] = this.schema;
}
else {
this.headers['Content-Profile'] = this.schema;
}
if (this.method !== 'GET' && this.method !== 'HEAD') {
this.headers['Content-Type'] = 'application/json';
}
// NOTE: Invoke w/o `this` to avoid illegal invocation error.
// https://github.com/supabase/postgrest-js/pull/247
const _fetch = this.fetch;
let res = _fetch(this.url.toString(), {
method: this.method,
headers: this.headers,
body: JSON.stringify(this.body),
signal: this.signal,
}).then(async (res) => {
var _a, _b, _c;
let error = null;
let data = null;
let count = null;
let status = res.status;
let statusText = res.statusText;
if (res.ok) {
if (this.method !== 'HEAD') {
const body = await res.text();
if (body === '') {
// Prefer: return=minimal
}
else if (this.headers['Accept'] === 'text/csv') {
data = body;
}
else if (this.headers['Accept'] &&
this.headers['Accept'].includes('application/vnd.pgrst.plan+text')) {
data = body;
}
else {
data = JSON.parse(body);
}
}
const countHeader = (_a = this.headers['Prefer']) === null || _a === void 0 ? void 0 : _a.match(/count=(exact|planned|estimated)/);
const contentRange = (_b = res.headers.get('content-range')) === null || _b === void 0 ? void 0 : _b.split('/');
if (countHeader && contentRange && contentRange.length > 1) {
count = parseInt(contentRange[1]);
}
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.isMaybeSingle && this.method === 'GET' && Array.isArray(data)) {
if (data.length > 1) {
error = {
// https://github.com/PostgREST/postgrest/blob/a867d79c42419af16c18c3fb019eba8df992626f/src/PostgREST/Error.hs#L553
code: 'PGRST116',
details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
hint: null,
message: 'JSON object requested, multiple (or no) rows returned',
};
data = null;
count = null;
status = 406;
statusText = 'Not Acceptable';
}
else if (data.length === 1) {
data = data[0];
}
else {
data = null;
}
}
}
else {
const body = await res.text();
try {
error = JSON.parse(body);
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (Array.isArray(error) && res.status === 404) {
data = [];
error = null;
status = 200;
statusText = 'OK';
}
}
catch (_d) {
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (res.status === 404 && body === '') {
status = 204;
statusText = 'No Content';
}
else {
error = {
message: body,
};
}
}
if (error && this.isMaybeSingle && ((_c = error === null || error === void 0 ? void 0 : error.details) === null || _c === void 0 ? void 0 : _c.includes('0 rows'))) {
error = null;
status = 200;
statusText = 'OK';
}
if (error && this.shouldThrowOnError) {
throw new PostgrestError_1.default(error);
}
}
const postgrestResponse = {
error,
data,
count,
status,
statusText,
};
return postgrestResponse;
});
if (!this.shouldThrowOnError) {
res = res.catch((fetchError) => {
var _a, _b, _c;
return ({
error: {
message: `${(_a = fetchError === null || fetchError === void 0 ? void 0 : fetchError.name) !== null && _a !== void 0 ? _a : 'FetchError'}: ${fetchError === null || fetchError === void 0 ? void 0 : fetchError.message}`,
details: `${(_b = fetchError === null || fetchError === void 0 ? void 0 : fetchError.stack) !== null && _b !== void 0 ? _b : ''}`,
hint: '',
code: `${(_c = fetchError === null || fetchError === void 0 ? void 0 : fetchError.code) !== null && _c !== void 0 ? _c : ''}`,
},
data: null,
count: null,
status: 0,
statusText: '',
});
});
}
return res.then(onfulfilled, onrejected);
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns() {
/* istanbul ignore next */
return this;
}
/**
* Override the type of the returned `data` field in the response.
*
* @typeParam NewResult - The new type to cast the response data to
* @typeParam Options - Optional type configuration (defaults to { merge: true })
* @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
* @example
* ```typescript
* // Merge with existing types (default behavior)
* const query = supabase
* .from('users')
* .select()
* .overrideTypes<{ custom_field: string }>()
*
* // Replace existing types completely
* const replaceQuery = supabase
* .from('users')
* .select()
* .overrideTypes<{ id: number; name: string }, { merge: false }>()
* ```
* @returns A PostgrestBuilder instance with the new type
*/
overrideTypes() {
return this;
}
}
exports.default = PostgrestBuilder;
//# sourceMappingURL=PostgrestBuilder.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,72 @@
import PostgrestQueryBuilder from './PostgrestQueryBuilder';
import PostgrestFilterBuilder from './PostgrestFilterBuilder';
import { Fetch, GenericSchema } from './types';
/**
* PostgREST client.
*
* @typeParam Database - Types for the schema from the [type
* generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
*
* @typeParam SchemaName - Postgres schema to switch to. Must be a string
* literal, the same one passed to the constructor. If the schema is not
* `"public"`, this must be supplied manually.
*/
export default class PostgrestClient<Database = any, SchemaName extends string & keyof Database = 'public' extends keyof Database ? 'public' : string & keyof Database, Schema extends GenericSchema = Database[SchemaName] extends GenericSchema ? Database[SchemaName] : any> {
url: string;
headers: Record<string, string>;
schemaName?: SchemaName;
fetch?: Fetch;
/**
* Creates a PostgREST client.
*
* @param url - URL of the PostgREST endpoint
* @param options - Named parameters
* @param options.headers - Custom headers
* @param options.schema - Postgres schema to switch to
* @param options.fetch - Custom fetch
*/
constructor(url: string, { headers, schema, fetch, }?: {
headers?: Record<string, string>;
schema?: SchemaName;
fetch?: Fetch;
});
from<TableName extends string & keyof Schema['Tables'], Table extends Schema['Tables'][TableName]>(relation: TableName): PostgrestQueryBuilder<Schema, Table, TableName>;
from<ViewName extends string & keyof Schema['Views'], View extends Schema['Views'][ViewName]>(relation: ViewName): PostgrestQueryBuilder<Schema, View, ViewName>;
/**
* Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
*/
schema<DynamicSchema extends string & keyof Database>(schema: DynamicSchema): PostgrestClient<Database, DynamicSchema, Database[DynamicSchema] extends GenericSchema ? Database[DynamicSchema] : any>;
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
rpc<FnName extends string & keyof Schema['Functions'], Fn extends Schema['Functions'][FnName]>(fn: FnName, args?: Fn['Args'], { head, get, count, }?: {
head?: boolean;
get?: boolean;
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Fn['Returns'] extends any[] ? Fn['Returns'][number] extends Record<string, unknown> ? Fn['Returns'][number] : never : never, Fn['Returns'], FnName, null>;
}
//# sourceMappingURL=PostgrestClient.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestClient.d.ts","sourceRoot":"","sources":["../../src/PostgrestClient.ts"],"names":[],"mappings":"AAAA,OAAO,qBAAqB,MAAM,yBAAyB,CAAA;AAC3D,OAAO,sBAAsB,MAAM,0BAA0B,CAAA;AAG7D,OAAO,EAAE,KAAK,EAAE,aAAa,EAAE,MAAM,SAAS,CAAA;AAE9C;;;;;;;;;GASG;AACH,MAAM,CAAC,OAAO,OAAO,eAAe,CAClC,QAAQ,GAAG,GAAG,EACd,UAAU,SAAS,MAAM,GAAG,MAAM,QAAQ,GAAG,QAAQ,SAAS,MAAM,QAAQ,GACxE,QAAQ,GACR,MAAM,GAAG,MAAM,QAAQ,EAC3B,MAAM,SAAS,aAAa,GAAG,QAAQ,CAAC,UAAU,CAAC,SAAS,aAAa,GACrE,QAAQ,CAAC,UAAU,CAAC,GACpB,GAAG;IAEP,GAAG,EAAE,MAAM,CAAA;IACX,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC/B,UAAU,CAAC,EAAE,UAAU,CAAA;IACvB,KAAK,CAAC,EAAE,KAAK,CAAA;IAGb;;;;;;;;OAQG;gBAED,GAAG,EAAE,MAAM,EACX,EACE,OAAY,EACZ,MAAM,EACN,KAAK,GACN,GAAE;QACD,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QAChC,MAAM,CAAC,EAAE,UAAU,CAAA;QACnB,KAAK,CAAC,EAAE,KAAK,CAAA;KACT;IAQR,IAAI,CACF,SAAS,SAAS,MAAM,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC,EACjD,KAAK,SAAS,MAAM,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,EACzC,QAAQ,EAAE,SAAS,GAAG,qBAAqB,CAAC,MAAM,EAAE,KAAK,EAAE,SAAS,CAAC;IACvE,IAAI,CAAC,QAAQ,SAAS,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,EAAE,IAAI,SAAS,MAAM,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,EAC1F,QAAQ,EAAE,QAAQ,GACjB,qBAAqB,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,CAAC;IAehD;;;;;;OAMG;IACH,MAAM,CAAC,aAAa,SAAS,MAAM,GAAG,MAAM,QAAQ,EAClD,MAAM,EAAE,aAAa,GACpB,eAAe,CAChB,QAAQ,EACR,aAAa,EACb,QAAQ,CAAC,aAAa,CAAC,SAAS,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,GAAG,CAC9E;IAQD;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,GAAG,CAAC,MAAM,SAAS,MAAM,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,EAAE,EAAE,SAAS,MAAM,CAAC,WAAW,CAAC,CAAC,MAAM,CAAC,EAC3F,EAAE,EAAE,MAAM,EACV,IAAI,GAAE,EAAE,CAAC,MAAM,CAAM,EACrB,EACE,IAAY,EACZ,GAAW,EACX,KAAK,GACN,GAAE;QACD,IAAI,CAAC,EAAE,OAAO,CAAA;QACd,GAAG,CAAC,EAAE,OAAO,CAAA;QACb,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KACrC,GACL,sBAAsB,CACvB,MAAM,EACN,EAAE,CAAC,SAAS,CAAC,SAAS,GAAG,EAAE,GACvB,EAAE,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GACnD,EAAE,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,GACrB,KAAK,GACP,KAAK,EACT,EAAE,CAAC,SAAS,CAAC,EACb,MAAM,EACN,IAAI,CACL;CAmCF"}


@@ -0,0 +1,122 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const PostgrestQueryBuilder_1 = __importDefault(require("./PostgrestQueryBuilder"));
const PostgrestFilterBuilder_1 = __importDefault(require("./PostgrestFilterBuilder"));
const constants_1 = require("./constants");
/**
* PostgREST client.
*
* @typeParam Database - Types for the schema from the [type
* generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
*
* @typeParam SchemaName - Postgres schema to switch to. Must be a string
* literal, the same one passed to the constructor. If the schema is not
* `"public"`, this must be supplied manually.
*/
class PostgrestClient {
// TODO: Add back shouldThrowOnError once we figure out the typings
/**
* Creates a PostgREST client.
*
* @param url - URL of the PostgREST endpoint
* @param options - Named parameters
* @param options.headers - Custom headers
* @param options.schema - Postgres schema to switch to
* @param options.fetch - Custom fetch
*/
constructor(url, { headers = {}, schema, fetch, } = {}) {
this.url = url;
this.headers = Object.assign(Object.assign({}, constants_1.DEFAULT_HEADERS), headers);
this.schemaName = schema;
this.fetch = fetch;
}
/**
* Perform a query on a table or a view.
*
* @param relation - The table or view name to query
*/
from(relation) {
const url = new URL(`${this.url}/${relation}`);
return new PostgrestQueryBuilder_1.default(url, {
headers: Object.assign({}, this.headers),
schema: this.schemaName,
fetch: this.fetch,
});
}
/**
* Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
*/
schema(schema) {
return new PostgrestClient(this.url, {
headers: this.headers,
schema,
fetch: this.fetch,
});
}
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
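*
* @example
* ```typescript
* // Illustrative sketch, assuming a hypothetical `add_numbers(a, b)` function is exposed:
* const { data, error } = await postgrest.rpc('add_numbers', { a: 1, b: 2 })
* // Call with read-only access mode (issues a GET instead of a POST):
* await postgrest.rpc('add_numbers', { a: 1, b: 2 }, { get: true })
* ```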
*/
rpc(fn, args = {}, { head = false, get = false, count, } = {}) {
let method;
const url = new URL(`${this.url}/rpc/${fn}`);
let body;
if (head || get) {
method = head ? 'HEAD' : 'GET';
Object.entries(args)
// params with undefined values need to be filtered out, otherwise they'll
// show up as `?param=undefined`
.filter(([_, value]) => value !== undefined)
// array values need special syntax
.map(([name, value]) => [name, Array.isArray(value) ? `{${value.join(',')}}` : `${value}`])
.forEach(([name, value]) => {
url.searchParams.append(name, value);
});
}
else {
method = 'POST';
body = args;
}
const headers = Object.assign({}, this.headers);
if (count) {
headers['Prefer'] = `count=${count}`;
}
return new PostgrestFilterBuilder_1.default({
method,
url,
headers,
schema: this.schemaName,
body,
fetch: this.fetch,
allowEmpty: false,
});
}
}
exports.default = PostgrestClient;
//# sourceMappingURL=PostgrestClient.js.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestClient.js","sourceRoot":"","sources":["../../src/PostgrestClient.ts"],"names":[],"mappings":";;;;;AAAA,oFAA2D;AAC3D,sFAA6D;AAE7D,2CAA6C;AAG7C;;;;;;;;;GASG;AACH,MAAqB,eAAe;IAclC,mEAAmE;IACnE;;;;;;;;OAQG;IACH,YACE,GAAW,EACX,EACE,OAAO,GAAG,EAAE,EACZ,MAAM,EACN,KAAK,MAKH,EAAE;QAEN,IAAI,CAAC,GAAG,GAAG,GAAG,CAAA;QACd,IAAI,CAAC,OAAO,mCAAQ,2BAAe,GAAK,OAAO,CAAE,CAAA;QACjD,IAAI,CAAC,UAAU,GAAG,MAAM,CAAA;QACxB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IASD;;;;OAIG;IACH,IAAI,CAAC,QAAgB;QACnB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,QAAQ,EAAE,CAAC,CAAA;QAC9C,OAAO,IAAI,+BAAqB,CAAC,GAAG,EAAE;YACpC,OAAO,oBAAO,IAAI,CAAC,OAAO,CAAE;YAC5B,MAAM,EAAE,IAAI,CAAC,UAAU;YACvB,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAA;IACJ,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CACJ,MAAqB;QAMrB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE;YACnC,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM;YACN,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAA;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,GAAG,CACD,EAAU,EACV,OAAmB,EAAE,EACrB,EACE,IAAI,GAAG,KAAK,EACZ,GAAG,GAAG,KAAK,EACX,KAAK,MAKH,EAAE;QAYN,IAAI,MAA+B,CAAA;QACnC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,QAAQ,EAAE,EAAE,CAAC,CAAA;QAC5C,IAAI,IAAyB,CAAA;QAC7B,IAAI,IAAI,IAAI,GAAG,EAAE;YACf,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAA;YAC9B,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC;gBAClB,wEAAwE;gBACxE,gCAAgC;iBAC/B,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,KAAK,KAAK,SAAS,CAAC;gBAC5C,mCAAmC;iBAClC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,EAAE,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,CAAC;iBAC1F,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,CAAC,EAAE,EAAE;gBACzB,GAAG,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACtC,CAAC,CAAC,CAAA;SACL;aAAM;YACL,MAAM,GAAG,MAAM,CAAA;YACf,IAAI,GAAG,IAAI,CAAA;SACZ;QAED,MAAM,OAAO,qBAAQ,IAAI,CAAC,OAAO,CAAE,CAAA;QACnC,IAAI,KAAK,EAAE;YACT,OAAO,CAAC,QAAQ,CAAC,GAAG,SAAS,KAAK,EAAE,CAAA;SACrC;QAED,OAAO,IAAI,gCAAsB,CAAC;YAChC,MAAM;YACN,GAAG;YACH,OAAO;YACP,MAAM,EAAE,IAAI,CAAC,UAAU;YACvB,IAAI;YACJ,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,UAAU,EAAE,KAAK;SAC4B,CAAC,CAAA;IAClD,CAAC;CACF;AApKD,kCAoKC"}


@@ -0,0 +1,17 @@
/**
* Error format
*
* {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
*/
export default class PostgrestError extends Error {
details: string;
hint: string;
code: string;
constructor(context: {
message: string;
details: string;
hint: string;
code: string;
});
}
//# sourceMappingURL=PostgrestError.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestError.d.ts","sourceRoot":"","sources":["../../src/PostgrestError.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,MAAM,CAAC,OAAO,OAAO,cAAe,SAAQ,KAAK;IAC/C,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;gBAEA,OAAO,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE;CAOtF"}


@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Error format
*
* {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
*/
class PostgrestError extends Error {
constructor(context) {
super(context.message);
this.name = 'PostgrestError';
this.details = context.details;
this.hint = context.hint;
this.code = context.code;
}
}
exports.default = PostgrestError;
//# sourceMappingURL=PostgrestError.js.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestError.js","sourceRoot":"","sources":["../../src/PostgrestError.ts"],"names":[],"mappings":";;AAAA;;;;GAIG;AACH,MAAqB,cAAe,SAAQ,KAAK;IAK/C,YAAY,OAAyE;QACnF,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QACtB,IAAI,CAAC,IAAI,GAAG,gBAAgB,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAA;QAC9B,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAA;IAC1B,CAAC;CACF;AAZD,iCAYC"}


@@ -0,0 +1,105 @@
import PostgrestTransformBuilder from './PostgrestTransformBuilder';
import { JsonPathToAccessor, JsonPathToType } from './select-query-parser/utils';
import { GenericSchema } from './types';
declare type FilterOperator = 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte' | 'like' | 'ilike' | 'is' | 'in' | 'cs' | 'cd' | 'sl' | 'sr' | 'nxl' | 'nxr' | 'adj' | 'ov' | 'fts' | 'plfts' | 'phfts' | 'wfts';
export declare type IsStringOperator<Path extends string> = Path extends `${string}->>${string}` ? true : false;
declare type ResolveFilterValue<Schema extends GenericSchema, Row extends Record<string, unknown>, ColumnName extends string> = ColumnName extends `${infer RelationshipTable}.${infer Remainder}` ? Remainder extends `${infer _}.${infer _}` ? ResolveFilterValue<Schema, Row, Remainder> : ResolveFilterRelationshipValue<Schema, RelationshipTable, Remainder> : ColumnName extends keyof Row ? Row[ColumnName] : IsStringOperator<ColumnName> extends true ? string : JsonPathToType<Row, JsonPathToAccessor<ColumnName>> extends infer JsonPathValue ? JsonPathValue extends never ? never : JsonPathValue : never;
declare type ResolveFilterRelationshipValue<Schema extends GenericSchema, RelationshipTable extends string, RelationshipColumn extends string> = Schema['Tables'] & Schema['Views'] extends infer TablesAndViews ? RelationshipTable extends keyof TablesAndViews ? 'Row' extends keyof TablesAndViews[RelationshipTable] ? RelationshipColumn extends keyof TablesAndViews[RelationshipTable]['Row'] ? TablesAndViews[RelationshipTable]['Row'][RelationshipColumn] : unknown : unknown : unknown : never;
export default class PostgrestFilterBuilder<Schema extends GenericSchema, Row extends Record<string, unknown>, Result, RelationName = unknown, Relationships = unknown> extends PostgrestTransformBuilder<Schema, Row, Result, RelationName, Relationships> {
/**
* Match only rows where `column` is equal to `value`.
*
* To check if the value of `column` is NULL, you should use `.is()` instead.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
eq<ColumnName extends string>(column: ColumnName, value: ResolveFilterValue<Schema, Row, ColumnName> extends never ? NonNullable<unknown> : ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue ? NonNullable<ResolvedFilterValue> : never): this;
/**
* Match only rows where `column` is not equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
neq<ColumnName extends string>(column: ColumnName, value: ResolveFilterValue<Schema, Row, ColumnName> extends never ? unknown : ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue ? ResolvedFilterValue : never): this;
gt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this;
gt(column: string, value: unknown): this;
gte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this;
gte(column: string, value: unknown): this;
lt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this;
lt(column: string, value: unknown): this;
lte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this;
lte(column: string, value: unknown): this;
like<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this;
like(column: string, pattern: string): this;
likeAllOf<ColumnName extends string & keyof Row>(column: ColumnName, patterns: readonly string[]): this;
likeAllOf(column: string, patterns: readonly string[]): this;
likeAnyOf<ColumnName extends string & keyof Row>(column: ColumnName, patterns: readonly string[]): this;
likeAnyOf(column: string, patterns: readonly string[]): this;
ilike<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this;
ilike(column: string, pattern: string): this;
ilikeAllOf<ColumnName extends string & keyof Row>(column: ColumnName, patterns: readonly string[]): this;
ilikeAllOf(column: string, patterns: readonly string[]): this;
ilikeAnyOf<ColumnName extends string & keyof Row>(column: ColumnName, patterns: readonly string[]): this;
ilikeAnyOf(column: string, patterns: readonly string[]): this;
is<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName] & (boolean | null)): this;
is(column: string, value: boolean | null): this;
/**
* Match only rows where `column` is included in the `values` array.
*
* @param column - The column to filter on
* @param values - The values array to filter with
*/
in<ColumnName extends string>(column: ColumnName, values: ReadonlyArray<ResolveFilterValue<Schema, Row, ColumnName> extends never ? unknown : ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue ? ResolvedFilterValue : never>): this;
contains<ColumnName extends string & keyof Row>(column: ColumnName, value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>): this;
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this;
containedBy<ColumnName extends string & keyof Row>(column: ColumnName, value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>): this;
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this;
rangeGt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this;
rangeGt(column: string, range: string): this;
rangeGte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this;
rangeGte(column: string, range: string): this;
rangeLt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this;
rangeLt(column: string, range: string): this;
rangeLte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this;
rangeLte(column: string, range: string): this;
rangeAdjacent<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this;
rangeAdjacent(column: string, range: string): this;
overlaps<ColumnName extends string & keyof Row>(column: ColumnName, value: string | ReadonlyArray<Row[ColumnName]>): this;
overlaps(column: string, value: string | readonly unknown[]): this;
textSearch<ColumnName extends string & keyof Row>(column: ColumnName, query: string, options?: {
config?: string;
type?: 'plain' | 'phrase' | 'websearch';
}): this;
textSearch(column: string, query: string, options?: {
config?: string;
type?: 'plain' | 'phrase' | 'websearch';
}): this;
match<ColumnName extends string & keyof Row>(query: Record<ColumnName, Row[ColumnName]>): this;
match(query: Record<string, unknown>): this;
not<ColumnName extends string & keyof Row>(column: ColumnName, operator: FilterOperator, value: Row[ColumnName]): this;
not(column: string, operator: string, value: unknown): this;
/**
* Match only rows which satisfy at least one of the filters.
*
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure it's properly sanitized.
*
* It's currently not possible to do an `.or()` filter across multiple tables.
*
* @param filters - The filters to use, following PostgREST syntax
* @param options - Named parameters
* @param options.referencedTable - Set this to filter on referenced tables
* instead of the parent table
* @param options.foreignTable - Deprecated, use `referencedTable` instead
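*
* @example
* ```typescript
* // Illustrative sketch (hypothetical `users` table): rows where id is 2 OR name is 'Ada'
* const { data } = await supabase.from('users').select().or('id.eq.2,name.eq.Ada')
* ```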
*/
or(filters: string, { foreignTable, referencedTable, }?: {
foreignTable?: string;
referencedTable?: string;
}): this;
filter<ColumnName extends string & keyof Row>(column: ColumnName, operator: `${'' | 'not.'}${FilterOperator}`, value: unknown): this;
filter(column: string, operator: string, value: unknown): this;
}
export {};
//# sourceMappingURL=PostgrestFilterBuilder.d.ts.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,381 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const PostgrestTransformBuilder_1 = __importDefault(require("./PostgrestTransformBuilder"));
class PostgrestFilterBuilder extends PostgrestTransformBuilder_1.default {
/**
* Match only rows where `column` is equal to `value`.
*
* To check if the value of `column` is NULL, you should use `.is()` instead.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
eq(column, value) {
this.url.searchParams.append(column, `eq.${value}`);
return this;
}
/**
* Match only rows where `column` is not equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
neq(column, value) {
this.url.searchParams.append(column, `neq.${value}`);
return this;
}
/**
* Match only rows where `column` is greater than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gt(column, value) {
this.url.searchParams.append(column, `gt.${value}`);
return this;
}
/**
* Match only rows where `column` is greater than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gte(column, value) {
this.url.searchParams.append(column, `gte.${value}`);
return this;
}
/**
* Match only rows where `column` is less than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lt(column, value) {
this.url.searchParams.append(column, `lt.${value}`);
return this;
}
/**
* Match only rows where `column` is less than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lte(column, value) {
this.url.searchParams.append(column, `lte.${value}`);
return this;
}
/**
* Match only rows where `column` matches `pattern` case-sensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
like(column, pattern) {
this.url.searchParams.append(column, `like.${pattern}`);
return this;
}
/**
* Match only rows where `column` matches all of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAllOf(column, patterns) {
this.url.searchParams.append(column, `like(all).{${patterns.join(',')}}`);
return this;
}
/**
* Match only rows where `column` matches any of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAnyOf(column, patterns) {
this.url.searchParams.append(column, `like(any).{${patterns.join(',')}}`);
return this;
}
/**
* Match only rows where `column` matches `pattern` case-insensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
ilike(column, pattern) {
this.url.searchParams.append(column, `ilike.${pattern}`);
return this;
}
/**
* Match only rows where `column` matches all of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAllOf(column, patterns) {
this.url.searchParams.append(column, `ilike(all).{${patterns.join(',')}}`);
return this;
}
/**
* Match only rows where `column` matches any of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAnyOf(column, patterns) {
this.url.searchParams.append(column, `ilike(any).{${patterns.join(',')}}`);
return this;
}
/**
* Match only rows where `column` IS `value`.
*
* For non-boolean columns, this is only relevant for checking if the value of
* `column` is NULL by setting `value` to `null`.
*
* For boolean columns, you can also set `value` to `true` or `false` and it
* will behave the same way as `.eq()`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
is(column, value) {
this.url.searchParams.append(column, `is.${value}`);
return this;
}
/**
* Match only rows where `column` is included in the `values` array.
*
* @param column - The column to filter on
* @param values - The values array to filter with
*/
in(column, values) {
const cleanedValues = Array.from(new Set(values))
.map((s) => {
// handle postgrest reserved characters
// https://postgrest.org/en/v7.0.0/api.html#reserved-characters
if (typeof s === 'string' && new RegExp('[,()]').test(s))
return `"${s}"`;
else
return `${s}`;
})
.join(',');
this.url.searchParams.append(column, `in.(${cleanedValues})`);
return this;
}
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* `column` contains every element appearing in `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
contains(column, value) {
if (typeof value === 'string') {
// range types can be inclusive '[', ']' or exclusive '(', ')' so just
// keep it simple and accept a string
this.url.searchParams.append(column, `cs.${value}`);
}
else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cs.{${value.join(',')}}`);
}
else {
// json
this.url.searchParams.append(column, `cs.${JSON.stringify(value)}`);
}
return this;
}
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* every element appearing in `column` is contained by `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
containedBy(column, value) {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `cd.${value}`);
}
else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cd.{${value.join(',')}}`);
}
else {
// json
this.url.searchParams.append(column, `cd.${JSON.stringify(value)}`);
}
return this;
}
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is greater than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGt(column, range) {
this.url.searchParams.append(column, `sr.${range}`);
return this;
}
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or greater than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGte(column, range) {
this.url.searchParams.append(column, `nxl.${range}`);
return this;
}
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is less than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLt(column, range) {
this.url.searchParams.append(column, `sl.${range}`);
return this;
}
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or less than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLte(column, range) {
this.url.searchParams.append(column, `nxr.${range}`);
return this;
}
/**
* Only relevant for range columns. Match only rows where `column` is
* mutually exclusive to `range` and there can be no element between the two
* ranges.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeAdjacent(column, range) {
this.url.searchParams.append(column, `adj.${range}`);
return this;
}
/**
* Only relevant for array and range columns. Match only rows where
* `column` and `value` have an element in common.
*
* @param column - The array or range column to filter on
* @param value - The array or range value to filter with
*/
overlaps(column, value) {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `ov.${value}`);
}
else {
// array
this.url.searchParams.append(column, `ov.{${value.join(',')}}`);
}
return this;
}
/**
* Only relevant for text and tsvector columns. Match only rows where
* `column` matches the query string in `query`.
*
* @param column - The text or tsvector column to filter on
* @param query - The query text to match with
* @param options - Named parameters
* @param options.config - The text search configuration to use
* @param options.type - Change how the `query` text is interpreted
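*
* @example
* ```typescript
* // Illustrative sketch, assuming a hypothetical `quotes` table with a tsvector `catchphrase` column:
* const { data } = await postgrest
*   .from('quotes')
*   .select()
*   .textSearch('catchphrase', "'fat' & 'cat'", { config: 'english' })
* ```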
*/
textSearch(column, query, { config, type } = {}) {
let typePart = '';
if (type === 'plain') {
typePart = 'pl';
}
else if (type === 'phrase') {
typePart = 'ph';
}
else if (type === 'websearch') {
typePart = 'w';
}
const configPart = config === undefined ? '' : `(${config})`;
this.url.searchParams.append(column, `${typePart}fts${configPart}.${query}`);
return this;
}
/**
* Match only rows where each column in `query` keys is equal to its
* associated value. Shorthand for multiple `.eq()`s.
*
* @param query - The object to filter with, with column names as keys mapped
* to their filter values
*/
match(query) {
Object.entries(query).forEach(([column, value]) => {
this.url.searchParams.append(column, `eq.${value}`);
});
return this;
}
/**
* Match only rows which don't satisfy the filter.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to be negated to filter with, following
* PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
not(column, operator, value) {
this.url.searchParams.append(column, `not.${operator}.${value}`);
return this;
}
/**
* Match only rows which satisfy at least one of the filters.
*
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure it's properly sanitized.
*
* It's currently not possible to do an `.or()` filter across multiple tables.
*
* @param filters - The filters to use, following PostgREST syntax
* @param options - Named parameters
* @param options.referencedTable - Set this to filter on referenced tables
* instead of the parent table
* @param options.foreignTable - Deprecated, use `referencedTable` instead
*/
or(filters, { foreignTable, referencedTable = foreignTable, } = {}) {
const key = referencedTable ? `${referencedTable}.or` : 'or';
this.url.searchParams.append(key, `(${filters})`);
return this;
}
/**
* Match only rows which satisfy the filter. This is an escape hatch - you
* should use the specific filter methods wherever possible.
*
* Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to filter with, following PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
filter(column, operator, value) {
this.url.searchParams.append(column, `${operator}.${value}`);
return this;
}
}
exports.default = PostgrestFilterBuilder;
//# sourceMappingURL=PostgrestFilterBuilder.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,117 @@
import PostgrestFilterBuilder from './PostgrestFilterBuilder';
import { GetResult } from './select-query-parser/result';
import { Fetch, GenericSchema, GenericTable, GenericView } from './types';
export default class PostgrestQueryBuilder<Schema extends GenericSchema, Relation extends GenericTable | GenericView, RelationName = unknown, Relationships = Relation extends {
Relationships: infer R;
} ? R : unknown> {
url: URL;
headers: Record<string, string>;
schema?: string;
signal?: AbortSignal;
fetch?: Fetch;
constructor(url: URL, { headers, schema, fetch, }: {
headers?: Record<string, string>;
schema?: string;
fetch?: Fetch;
});
/**
* Perform a SELECT query on the table or view.
*
* @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName`
*
* @param options - Named parameters
*
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
*
* @param options.count - Count algorithm to use to count rows in the table or view.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
select<Query extends string = '*', ResultOne = GetResult<Schema, Relation['Row'], RelationName, Relationships, Query>>(columns?: Query, { head, count, }?: {
head?: boolean;
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Relation['Row'], ResultOne[], RelationName, Relationships>;
insert<Row extends Relation extends {
Insert: unknown;
} ? Relation['Insert'] : never>(values: Row, options?: {
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
insert<Row extends Relation extends {
Insert: unknown;
} ? Relation['Insert'] : never>(values: Row[], options?: {
count?: 'exact' | 'planned' | 'estimated';
defaultToNull?: boolean;
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
upsert<Row extends Relation extends {
Insert: unknown;
} ? Relation['Insert'] : never>(values: Row, options?: {
onConflict?: string;
ignoreDuplicates?: boolean;
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
upsert<Row extends Relation extends {
Insert: unknown;
} ? Relation['Insert'] : never>(values: Row[], options?: {
onConflict?: string;
ignoreDuplicates?: boolean;
count?: 'exact' | 'planned' | 'estimated';
defaultToNull?: boolean;
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
/**
* Perform an UPDATE on the table or view.
*
* By default, updated rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param values - The values to update with
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count updated rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
update<Row extends Relation extends {
Update: unknown;
} ? Relation['Update'] : never>(values: Row, { count, }?: {
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
/**
* Perform a DELETE on the table or view.
*
* By default, deleted rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count deleted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
delete({ count, }?: {
count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>;
}
//# sourceMappingURL=PostgrestQueryBuilder.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"PostgrestQueryBuilder.d.ts","sourceRoot":"","sources":["../../src/PostgrestQueryBuilder.ts"],"names":[],"mappings":"AACA,OAAO,sBAAsB,MAAM,0BAA0B,CAAA;AAC7D,OAAO,EAAE,SAAS,EAAE,MAAM,8BAA8B,CAAA;AACxD,OAAO,EAAE,KAAK,EAAE,aAAa,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAEzE,MAAM,CAAC,OAAO,OAAO,qBAAqB,CACxC,MAAM,SAAS,aAAa,EAC5B,QAAQ,SAAS,YAAY,GAAG,WAAW,EAC3C,YAAY,GAAG,OAAO,EACtB,aAAa,GAAG,QAAQ,SAAS;IAAE,aAAa,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,CAAC,GAAG,OAAO;IAEzE,GAAG,EAAE,GAAG,CAAA;IACR,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC/B,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,KAAK,CAAC,EAAE,KAAK,CAAA;gBAGX,GAAG,EAAE,GAAG,EACR,EACE,OAAY,EACZ,MAAM,EACN,KAAK,GACN,EAAE;QACD,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QAChC,MAAM,CAAC,EAAE,MAAM,CAAA;QACf,KAAK,CAAC,EAAE,KAAK,CAAA;KACd;IAQH;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,MAAM,CACJ,KAAK,SAAS,MAAM,GAAG,GAAG,EAC1B,SAAS,GAAG,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,YAAY,EAAE,aAAa,EAAE,KAAK,CAAC,EAElF,OAAO,CAAC,EAAE,KAAK,EACf,EACE,IAAY,EACZ,KAAK,GACN,GAAE;QACD,IAAI,CAAC,EAAE,OAAO,CAAA;QACd,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KACrC,GACL,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,SAAS,EAAE,EAAE,YAAY,EAAE,aAAa,CAAC;IAgC5F,MAAM,CAAC,GAAG,SAAS,QAAQ,SAAS;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,GAAG,KAAK,EAClF,MAAM,EAAE,GAAG,EACX,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KAC1C,GACA,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;IACrF,MAAM,CAAC,GAAG,SAAS,QAAQ,SAAS;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,GAAG,KAAK,EAClF,MAAM,EAAE,GAAG,EAAE,EACb,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;QACzC,aAAa,CAAC,EAAE,OAAO,CAAA;KACxB,GACA,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;IAuErF,MAAM,CAAC,GAAG,SAAS,QAAQ,SAAS;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,GAAG,KAAK,EAClF,MAAM,EAAE,GAAG,EACX,OAAO,CAAC,EAAE;QACR,UAAU,CAAC,EAAE,MAAM,CAAA;QACnB,gBAAgB,CAAC,EAAE,OAAO,CAAA;QAC1B,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KAC1C,GACA,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;IACrF,MAAM,CAAC,GAAG,SAAS,QAAQ,SAAS;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,GAAG,KAAK,EAClF,MAAM,EAAE,GAAG,EAAE,EACb,OAAO,CAAC,EAAE;QACR,UAAU,CAAC,EAAE,MAAM,CAAA;QACnB,gBAAgB,CAAC,EAAE,OAAO,CAAA;QAC1B,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;QACzC,aAAa,CAAC,EAAE,OAAO,CAAA;KACxB,GACA,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;IAwFrF;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,MAAM,CAAC,GAAG,SAAS,QAAQ,SAAS;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,GAAG,KAAK,EAClF,MAAM,EAAE,GAAG,EACX,EACE,KAAK,GACN,GAAE;QACD,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KACrC,GACL,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;IAsBrF;;;;;;;;;;;;;;;;;;OAkBG;IACH,MAAM,CAAC,EACL,KAAK,GACN,GAAE;QACD,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,WAAW,CAAA;KACrC,GAAG,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,aAAa,CAAC;CAoB5F"}


@@ -0,0 +1,271 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const PostgrestFilterBuilder_1 = __importDefault(require("./PostgrestFilterBuilder"));
class PostgrestQueryBuilder {
constructor(url, { headers = {}, schema, fetch, }) {
this.url = url;
this.headers = headers;
this.schema = schema;
this.fetch = fetch;
}
/**
* Perform a SELECT query on the table or view.
*
* @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName`
*
* @param options - Named parameters
*
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
*
* @param options.count - Count algorithm to use to count rows in the table or view.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
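*
* @example
* ```typescript
* // Illustrative sketch (hypothetical `users` table): fetch only the exact row count, no data
* const { count } = await postgrest.from('users').select('*', { head: true, count: 'exact' })
* ```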
*/
select(columns, { head = false, count, } = {}) {
const method = head ? 'HEAD' : 'GET';
// Remove whitespaces except when quoted
let quoted = false;
const cleanedColumns = (columns !== null && columns !== void 0 ? columns : '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return '';
}
if (c === '"') {
quoted = !quoted;
}
return c;
})
.join('');
this.url.searchParams.set('select', cleanedColumns);
if (count) {
this.headers['Prefer'] = `count=${count}`;
}
return new PostgrestFilterBuilder_1.default({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
});
}
/**
* Perform an INSERT into the table or view.
*
     * By default, inserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to insert. Pass an object to insert a single row
* or an array to insert multiple rows.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count inserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. Only applies for bulk
* inserts.
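     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; chaining `.select()` returns the
     * // inserted rows, as described above.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .insert([{ name: 'Ada' }, { name: 'Grace' }])
     *   .select()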
*/
insert(values, { count, defaultToNull = true, } = {}) {
const method = 'POST';
const prefersHeaders = [];
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer']);
}
if (count) {
prefersHeaders.push(`count=${count}`);
}
if (!defaultToNull) {
prefersHeaders.push('missing=default');
}
this.headers['Prefer'] = prefersHeaders.join(',');
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), []);
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`);
this.url.searchParams.set('columns', uniqueColumns.join(','));
}
}
return new PostgrestFilterBuilder_1.default({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
});
}
/**
* Perform an UPSERT on the table or view. Depending on the column(s) passed
* to `onConflict`, `.upsert()` allows you to perform the equivalent of
* `.insert()` if a row with the corresponding `onConflict` columns doesn't
* exist, or if it does exist, perform an alternative action depending on
* `ignoreDuplicates`.
*
     * By default, upserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to upsert with. Pass an object to upsert a
* single row or an array to upsert multiple rows.
*
* @param options - Named parameters
*
* @param options.onConflict - Comma-separated UNIQUE column(s) to specify how
* duplicate rows are determined. Two rows are duplicates if all the
* `onConflict` columns are equal.
*
* @param options.ignoreDuplicates - If `true`, duplicate rows are ignored. If
* `false`, duplicate rows are merged with existing rows.
*
* @param options.count - Count algorithm to use to count upserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. This only applies when
* inserting new rows, not when merging with existing rows under
* `ignoreDuplicates: false`. This also only applies when doing bulk upserts.
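     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` and its UNIQUE `handle` column are hypothetical; existing
     * // rows with the same `handle` are merged rather than duplicated.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .upsert({ handle: 'ada', name: 'Ada' }, { onConflict: 'handle' })
     *   .select()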
*/
upsert(values, { onConflict, ignoreDuplicates = false, count, defaultToNull = true, } = {}) {
const method = 'POST';
const prefersHeaders = [`resolution=${ignoreDuplicates ? 'ignore' : 'merge'}-duplicates`];
if (onConflict !== undefined)
this.url.searchParams.set('on_conflict', onConflict);
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer']);
}
if (count) {
prefersHeaders.push(`count=${count}`);
}
if (!defaultToNull) {
prefersHeaders.push('missing=default');
}
this.headers['Prefer'] = prefersHeaders.join(',');
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), []);
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`);
this.url.searchParams.set('columns', uniqueColumns.join(','));
}
}
return new PostgrestFilterBuilder_1.default({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
});
}
/**
* Perform an UPDATE on the table or view.
*
     * By default, updated rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param values - The values to update with
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count updated rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
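     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; note the filter before `.select()`.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .update({ name: 'Ada Lovelace' })
     *   .eq('id', 1)
     *   .select()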
*/
update(values, { count, } = {}) {
const method = 'PATCH';
const prefersHeaders = [];
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer']);
}
if (count) {
prefersHeaders.push(`count=${count}`);
}
this.headers['Prefer'] = prefersHeaders.join(',');
return new PostgrestFilterBuilder_1.default({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
});
}
/**
* Perform a DELETE on the table or view.
*
     * By default, deleted rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count deleted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
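     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; the filter restricts what is deleted.
     * const { error, count } = await postgrest
     *   .from('users')
     *   .delete({ count: 'exact' })
     *   .eq('id', 1)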
*/
delete({ count, } = {}) {
const method = 'DELETE';
const prefersHeaders = [];
if (count) {
prefersHeaders.push(`count=${count}`);
}
if (this.headers['Prefer']) {
prefersHeaders.unshift(this.headers['Prefer']);
}
this.headers['Prefer'] = prefersHeaders.join(',');
return new PostgrestFilterBuilder_1.default({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
});
}
}
exports.default = PostgrestQueryBuilder;
//# sourceMappingURL=PostgrestQueryBuilder.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,149 @@
import PostgrestBuilder from './PostgrestBuilder';
import { GetResult } from './select-query-parser/result';
import { GenericSchema, CheckMatchingArrayTypes } from './types';
export default class PostgrestTransformBuilder<Schema extends GenericSchema, Row extends Record<string, unknown>, Result, RelationName = unknown, Relationships = unknown> extends PostgrestBuilder<Result> {
/**
* Perform a SELECT on the query result.
*
* By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
* return modified rows. By calling this method, modified rows are returned in
* `data`.
*
* @param columns - The columns to retrieve, separated by commas
*/
select<Query extends string = '*', NewResultOne = GetResult<Schema, Row, RelationName, Relationships, Query>>(columns?: Query): PostgrestTransformBuilder<Schema, Row, NewResultOne[], RelationName, Relationships>;
order<ColumnName extends string & keyof Row>(column: ColumnName, options?: {
ascending?: boolean;
nullsFirst?: boolean;
referencedTable?: undefined;
}): this;
order(column: string, options?: {
ascending?: boolean;
nullsFirst?: boolean;
referencedTable?: string;
}): this;
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order<ColumnName extends string & keyof Row>(column: ColumnName, options?: {
ascending?: boolean;
nullsFirst?: boolean;
foreignTable?: undefined;
}): this;
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order(column: string, options?: {
ascending?: boolean;
nullsFirst?: boolean;
foreignTable?: string;
}): this;
/**
* Limit the query result by `count`.
*
* @param count - The maximum number of rows to return
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
limit(count: number, { foreignTable, referencedTable, }?: {
foreignTable?: string;
referencedTable?: string;
}): this;
/**
* Limit the query result by starting at an offset `from` and ending at the offset `to`.
* Only records within this range are returned.
     * This respects the query order; if there is no order clause, the range could behave unexpectedly.
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
* and fourth rows of the query.
*
* @param from - The starting index from which to limit the result
* @param to - The last index to which to limit the result
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
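     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; this returns rows 1-10 of the
     * // ordered result, since the bounds are 0-based and inclusive.
     * const { data } = await postgrest
     *   .from('users')
     *   .select('*')
     *   .order('id')
     *   .range(0, 9)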
*/
range(from: number, to: number, { foreignTable, referencedTable, }?: {
foreignTable?: string;
referencedTable?: string;
}): this;
/**
* Set the AbortSignal for the fetch request.
*
* @param signal - The AbortSignal to use for the fetch request
*/
abortSignal(signal: AbortSignal): this;
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be one row (e.g. using `.limit(1)`), otherwise this
* returns an error.
*/
single<ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never>(): PostgrestBuilder<ResultOne>;
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
* this returns an error.
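     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; `data` is a single object or `null`.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .select('*')
     *   .eq('id', 1)
     *   .maybeSingle()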
*/
maybeSingle<ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never>(): PostgrestBuilder<ResultOne | null>;
/**
* Return `data` as a string in CSV format.
*/
csv(): PostgrestBuilder<string>;
/**
* Return `data` as an object in [GeoJSON](https://geojson.org) format.
*/
geojson(): PostgrestBuilder<Record<string, unknown>>;
/**
* Return `data` as the EXPLAIN plan for the query.
*
* You need to enable the
* [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
* setting before using this method.
*
* @param options - Named parameters
*
* @param options.analyze - If `true`, the query will be executed and the
* actual run time will be returned
*
* @param options.verbose - If `true`, the query identifier will be returned
* and `data` will include the output columns of the query
*
* @param options.settings - If `true`, include information on configuration
* parameters that affect query planning
*
* @param options.buffers - If `true`, include information on buffer usage
*
* @param options.wal - If `true`, include information on WAL record generation
*
* @param options.format - The format of the output, can be `"text"` (default)
* or `"json"`
*/
explain({ analyze, verbose, settings, buffers, wal, format, }?: {
analyze?: boolean;
verbose?: boolean;
settings?: boolean;
buffers?: boolean;
wal?: boolean;
format?: 'json' | 'text';
}): PostgrestBuilder<Record<string, unknown>[]> | PostgrestBuilder<string>;
/**
* Rollback the query.
*
* `data` will still be returned, but the query is not committed.
*/
rollback(): this;
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestTransformBuilder<Schema, Row, CheckMatchingArrayTypes<Result, NewResult>, RelationName, Relationships>;
}
//# sourceMappingURL=PostgrestTransformBuilder.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"PostgrestTransformBuilder.d.ts","sourceRoot":"","sources":["../../src/PostgrestTransformBuilder.ts"],"names":[],"mappings":"AAAA,OAAO,gBAAgB,MAAM,oBAAoB,CAAA;AACjD,OAAO,EAAE,SAAS,EAAE,MAAM,8BAA8B,CAAA;AACxD,OAAO,EAAE,aAAa,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAA;AAEhE,MAAM,CAAC,OAAO,OAAO,yBAAyB,CAC5C,MAAM,SAAS,aAAa,EAC5B,GAAG,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACnC,MAAM,EACN,YAAY,GAAG,OAAO,EACtB,aAAa,GAAG,OAAO,CACvB,SAAQ,gBAAgB,CAAC,MAAM,CAAC;IAChC;;;;;;;;OAQG;IACH,MAAM,CACJ,KAAK,SAAS,MAAM,GAAG,GAAG,EAC1B,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE,GAAG,EAAE,YAAY,EAAE,aAAa,EAAE,KAAK,CAAC,EAEzE,OAAO,CAAC,EAAE,KAAK,GACd,yBAAyB,CAAC,MAAM,EAAE,GAAG,EAAE,YAAY,EAAE,EAAE,YAAY,EAAE,aAAa,CAAC;IA6BtF,KAAK,CAAC,UAAU,SAAS,MAAM,GAAG,MAAM,GAAG,EACzC,MAAM,EAAE,UAAU,EAClB,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,OAAO,CAAC;QAAC,eAAe,CAAC,EAAE,SAAS,CAAA;KAAE,GACnF,IAAI;IACP,KAAK,CACH,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,OAAO,CAAC;QAAC,eAAe,CAAC,EAAE,MAAM,CAAA;KAAE,GAChF,IAAI;IACP;;OAEG;IACH,KAAK,CAAC,UAAU,SAAS,MAAM,GAAG,MAAM,GAAG,EACzC,MAAM,EAAE,UAAU,EAClB,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,OAAO,CAAC;QAAC,YAAY,CAAC,EAAE,SAAS,CAAA;KAAE,GAChF,IAAI;IACP;;OAEG;IACH,KAAK,CACH,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,OAAO,CAAC;QAAC,YAAY,CAAC,EAAE,MAAM,CAAA;KAAE,GAC7E,IAAI;IA6CP;;;;;;;;;OASG;IACH,KAAK,CACH,KAAK,EAAE,MAAM,EACb,EACE,YAAY,EACZ,eAA8B,GAC/B,GAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,eAAe,CAAC,EAAE,MAAM,CAAA;KAAO,GAC1D,IAAI;IAMP;;;;;;;;;;;;;;OAcG;IACH,KAAK,CACH,IAAI,EAAE,MAAM,EACZ,EAAE,EAAE,MAAM,EACV,EACE,YAAY,EACZ,eAA8B,GAC/B,GAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,eAAe,CAAC,EAAE,MAAM,CAAA;KAAO,GAC1D,IAAI;IAUP;;;;OAIG;IACH,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI;IAKtC;;;;;OAKG;IACH,MAAM,CACJ,SAAS,GAAG,MAAM,SAAS,CAAC,MAAM,SAAS,CAAC,EAAE,GAAG,SAAS,GAAG,KAAK,KAC/D,gBAAgB,CAAC,SAAS,CAAC;IAKhC;;;;;OAKG;IACH,WAAW,CACT,SAAS,GAAG,MAAM,SAAS,CAAC,MAAM,SAAS,CAAC,EAAE,GAAG,SAAS,GAAG,KAAK,KAC/D,gBAAgB,CAAC,SAAS,GAAG,IAAI,CAAC;IAYvC;;OAEG;IACH,GAAG,IAAI,gBAAgB,CAAC,MAAM,CAAC;IAK/B;;OAEG;IACH,OAAO,IAAI,gBAAgB,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAKpD;;;;;;;;;;;;;;;;;;;;;;;;OAwBG;IACH,OAAO,CAAC,EACN,OAAe,EACf,OAAe,EACf,QAAgB,EAChB,OAAe,EACf,GAAW,EACX,MAAe,GAChB,GAAE;QACD,OAAO,CAAC,EAAE,OAAO,CAAA;QACjB,OAAO,CAAC,EAAE,OAAO,CAAA;QACjB,QAAQ,CAAC,EAAE,OAAO,CAAA;QAClB,OAAO,CAAC,EAAE,OAAO,CAAA;QACjB,GAAG,CAAC,EAAE,OAAO,CAAA;QACb,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;KACpB,GAAG,gBAAgB,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,gBAAgB,CAAC,MAAM,CAAC;IAmB/E;;;;OAIG;IACH,QAAQ,IAAI,IAAI;IAShB;;;;;OAKG;IACH,OAAO,CAAC,SAAS,KAAK,yBAAyB,CAC7C,MAAM,EACN,GAAG,EACH,uBAAuB,CAAC,MAAM,EAAE,SAAS,CAAC,EAC1C,YAAY,EACZ,aAAa,CACd;CASF"}

View File

@@ -0,0 +1,222 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const PostgrestBuilder_1 = __importDefault(require("./PostgrestBuilder"));
class PostgrestTransformBuilder extends PostgrestBuilder_1.default {
/**
* Perform a SELECT on the query result.
*
* By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
* return modified rows. By calling this method, modified rows are returned in
* `data`.
*
* @param columns - The columns to retrieve, separated by commas
*/
select(columns) {
// Remove whitespaces except when quoted
let quoted = false;
const cleanedColumns = (columns !== null && columns !== void 0 ? columns : '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return '';
}
if (c === '"') {
quoted = !quoted;
}
return c;
})
.join('');
this.url.searchParams.set('select', cleanedColumns);
if (this.headers['Prefer']) {
this.headers['Prefer'] += ',';
}
this.headers['Prefer'] += 'return=representation';
return this;
}
/**
* Order the query result by `column`.
*
* You can call this method multiple times to order by multiple columns.
*
* You can order referenced tables, but it only affects the ordering of the
* parent table if you use `!inner` in the query.
*
* @param column - The column to order by
* @param options - Named parameters
* @param options.ascending - If `true`, the result will be in ascending order
* @param options.nullsFirst - If `true`, `null`s appear first. If `false`,
* `null`s appear last.
* @param options.referencedTable - Set this to order a referenced table by
* its columns
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
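     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` and its `created_at` column are hypothetical.
     * const { data } = await postgrest
     *   .from('users')
     *   .select('id, created_at')
     *   .order('created_at', { ascending: false })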
*/
order(column, { ascending = true, nullsFirst, foreignTable, referencedTable = foreignTable, } = {}) {
const key = referencedTable ? `${referencedTable}.order` : 'order';
const existingOrder = this.url.searchParams.get(key);
this.url.searchParams.set(key, `${existingOrder ? `${existingOrder},` : ''}${column}.${ascending ? 'asc' : 'desc'}${nullsFirst === undefined ? '' : nullsFirst ? '.nullsfirst' : '.nullslast'}`);
return this;
}
/**
* Limit the query result by `count`.
*
* @param count - The maximum number of rows to return
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
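     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; at most 5 rows are returned.
     * const { data } = await postgrest
     *   .from('users')
     *   .select('*')
     *   .limit(5)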
*/
limit(count, { foreignTable, referencedTable = foreignTable, } = {}) {
const key = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`;
this.url.searchParams.set(key, `${count}`);
return this;
}
/**
* Limit the query result by starting at an offset `from` and ending at the offset `to`.
* Only records within this range are returned.
     * This respects the query order; if there is no order clause, the range could behave unexpectedly.
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
* and fourth rows of the query.
*
* @param from - The starting index from which to limit the result
* @param to - The last index to which to limit the result
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
range(from, to, { foreignTable, referencedTable = foreignTable, } = {}) {
const keyOffset = typeof referencedTable === 'undefined' ? 'offset' : `${referencedTable}.offset`;
const keyLimit = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`;
this.url.searchParams.set(keyOffset, `${from}`);
// Range is inclusive, so add 1
this.url.searchParams.set(keyLimit, `${to - from + 1}`);
return this;
}
/**
* Set the AbortSignal for the fetch request.
*
* @param signal - The AbortSignal to use for the fetch request
*/
abortSignal(signal) {
this.signal = signal;
return this;
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be one row (e.g. using `.limit(1)`), otherwise this
* returns an error.
*/
single() {
this.headers['Accept'] = 'application/vnd.pgrst.object+json';
return this;
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
* this returns an error.
*/
maybeSingle() {
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.method === 'GET') {
this.headers['Accept'] = 'application/json';
}
else {
this.headers['Accept'] = 'application/vnd.pgrst.object+json';
}
this.isMaybeSingle = true;
return this;
}
/**
* Return `data` as a string in CSV format.
*/
csv() {
this.headers['Accept'] = 'text/csv';
return this;
}
/**
* Return `data` as an object in [GeoJSON](https://geojson.org) format.
*/
geojson() {
this.headers['Accept'] = 'application/geo+json';
return this;
}
/**
* Return `data` as the EXPLAIN plan for the query.
*
* You need to enable the
* [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
* setting before using this method.
*
* @param options - Named parameters
*
* @param options.analyze - If `true`, the query will be executed and the
* actual run time will be returned
*
* @param options.verbose - If `true`, the query identifier will be returned
* and `data` will include the output columns of the query
*
* @param options.settings - If `true`, include information on configuration
* parameters that affect query planning
*
* @param options.buffers - If `true`, include information on buffer usage
*
* @param options.wal - If `true`, include information on WAL record generation
*
* @param options.format - The format of the output, can be `"text"` (default)
* or `"json"`
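     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table and the server must have the
     * // db_plan_enabled setting turned on, as noted above.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .select('*')
     *   .explain({ analyze: true, format: 'json' })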
*/
explain({ analyze = false, verbose = false, settings = false, buffers = false, wal = false, format = 'text', } = {}) {
var _a;
const options = [
analyze ? 'analyze' : null,
verbose ? 'verbose' : null,
settings ? 'settings' : null,
buffers ? 'buffers' : null,
wal ? 'wal' : null,
]
.filter(Boolean)
.join('|');
// An Accept header can carry multiple media types but postgrest-js always sends one
const forMediatype = (_a = this.headers['Accept']) !== null && _a !== void 0 ? _a : 'application/json';
this.headers['Accept'] = `application/vnd.pgrst.plan+${format}; for="${forMediatype}"; options=${options};`;
        // Both branches return `this` at runtime; the `format` check only
        // narrows the TypeScript return type, which is erased in this build.
        if (format === 'json')
            return this;
        else
            return this;
}
/**
* Rollback the query.
*
* `data` will still be returned, but the query is not committed.
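     *
     * @example
     * // Usage sketch added for illustration (not part of the original file).
     * // `users` is a hypothetical table; this assumes the PostgREST server
     * // is configured to honor the `tx=rollback` preference.
     * const { data, error } = await postgrest
     *   .from('users')
     *   .insert({ name: 'Ada' })
     *   .select()
     *   .rollback()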
*/
rollback() {
var _a;
if (((_a = this.headers['Prefer']) !== null && _a !== void 0 ? _a : '').trim().length > 0) {
this.headers['Prefer'] += ',tx=rollback';
}
else {
this.headers['Prefer'] = 'tx=rollback';
}
return this;
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns() {
return this;
}
}
exports.default = PostgrestTransformBuilder;
//# sourceMappingURL=PostgrestTransformBuilder.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"PostgrestTransformBuilder.js","sourceRoot":"","sources":["../../src/PostgrestTransformBuilder.ts"],"names":[],"mappings":";;;;;AAAA,0EAAiD;AAIjD,MAAqB,yBAMnB,SAAQ,0BAAwB;IAChC;;;;;;;;OAQG;IACH,MAAM,CAIJ,OAAe;QAEf,wCAAwC;QACxC,IAAI,MAAM,GAAG,KAAK,CAAA;QAClB,MAAM,cAAc,GAAG,CAAC,OAAO,aAAP,OAAO,cAAP,OAAO,GAAI,GAAG,CAAC;aACpC,KAAK,CAAC,EAAE,CAAC;aACT,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE;YACT,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE;gBAC3B,OAAO,EAAE,CAAA;aACV;YACD,IAAI,CAAC,KAAK,GAAG,EAAE;gBACb,MAAM,GAAG,CAAC,MAAM,CAAA;aACjB;YACD,OAAO,CAAC,CAAA;QACV,CAAC,CAAC;aACD,IAAI,CAAC,EAAE,CAAC,CAAA;QACX,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAA;QACnD,IAAI,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,GAAG,CAAA;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,uBAAuB,CAAA;QACjD,OAAO,IAMN,CAAA;IACH,CAAC;IAwBD;;;;;;;;;;;;;;;;;OAiBG;IACH,KAAK,CACH,MAAc,EACd,EACE,SAAS,GAAG,IAAI,EAChB,UAAU,EACV,YAAY,EACZ,eAAe,GAAG,YAAY,MAM5B,EAAE;QAEN,MAAM,GAAG,GAAG,eAAe,CAAC,CAAC,CAAC,GAAG,eAAe,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAA;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QAEpD,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CACvB,GAAG,EACH,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,aAAa,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,MAAM,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,GAChF,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,YAC/D,EAAE,CACH,CAAA;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;;;OASG;IACH,KAAK,CACH,KAAa,EACb,EACE,YAAY,EACZ,eAAe,GAAG,YAAY,MACyB,EAAE;QAE3D,MAAM,GAAG,GAAG,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,eAAe,QAAQ,CAAA;QACzF,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,KAAK,EAAE,CAAC,CAAA;QAC1C,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CACH,IAAY,EACZ,EAAU,EACV,EACE,YAAY,EACZ,eAAe,GAAG,YAAY,MACyB,EAAE;QAE3D,MAAM,SAAS,GACb,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,eAAe,SAAS,CAAA;QACjF,MAAM,QAAQ,GAAG,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,eAAe,QAAQ,CAAA;QAC9F,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,SAAS,EAAE,GAAG,IAAI,EAAE,CAAC,CAAA;QAC/C,+BAA+B;QAC/B,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,GAAG,IAAI,GAAG,CAAC,EAAE,CAAC,CAAA;QACvD,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;OAIG;IACH,WAAW,CAAC,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;OAKG;IACH,MAAM;QAGJ,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,mCAAmC,CAAA;QAC5D,OAAO,IAA8C,CAAA;IACvD,CAAC;IAED;;;;;OAKG;IACH,WAAW;QAGT,gFAAgF;QAChF,kEAAkE;QAClE,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,EAAE;YACzB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,kBAAkB,CAAA;SAC5C;aAAM;YACL,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,mCAAmC,CAAA;SAC7D;QACD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAA;QACzB,OAAO,IAAqD,CAAA;IAC9D,CAAC;IAED;;OAEG;IACH,GAAG;QACD,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,UAAU,CAAA;QACnC,OAAO,IAA2C,CAAA;IACpD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,sBAAsB,CAAA;QAC/C,OAAO,IAA4D,CAAA;IACrE,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;OAwBG;IACH,OAAO,CAAC,EACN,OAAO,GAAG,KAAK,EACf,OAAO,GAAG,KAAK,EACf,QAAQ,GAAG,KAAK,EAChB,OAAO,GAAG,KAAK,EACf,GAAG,GAAG,KAAK,EACX,MAAM,GAAG,MAAM,MAQb,EAAE;;QACJ,MAAM,OAAO,GAAG;YACd,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;YAC1B,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;YAC1B,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;YAC5B,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;YAC1B,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;SACnB;aACE,MAAM,CAAC,OAAO,CAAC;aACf,IAAI,CAAC,GAAG,CAAC,CAAA;QACZ,oFAAoF;QACpF,MAAM,YAAY,GAAG,MAAA,IAAI,CAAC,OAA
O,CAAC,QAAQ,CAAC,mCAAI,kBAAkB,CAAA;QACjE,IAAI,CAAC,OAAO,CACV,QAAQ,CACT,GAAG,8BAA8B,MAAM,UAAU,YAAY,cAAc,OAAO,GAAG,CAAA;QACtF,IAAI,MAAM,KAAK,MAAM;YAAE,OAAO,IAA8D,CAAA;;YACvF,OAAO,IAA2C,CAAA;IACzD,CAAC;IAED;;;;OAIG;IACH,QAAQ;;QACN,IAAI,CAAC,MAAA,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,mCAAI,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE;YACpD,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,cAAc,CAAA;SACzC;aAAM;YACL,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,aAAa,CAAA;SACvC;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;OAKG;IACH,OAAO;QAOL,OAAO,IAMN,CAAA;IACH,CAAC;CACF;AAlUD,4CAkUC"}

View File

@@ -0,0 +1,4 @@
export declare const DEFAULT_HEADERS: {
'X-Client-Info': string;
};
//# sourceMappingURL=constants.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AACA,eAAO,MAAM,eAAe;;CAAiD,CAAA"}

View File

@@ -0,0 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_HEADERS = void 0;
const version_1 = require("./version");
exports.DEFAULT_HEADERS = { 'X-Client-Info': `postgrest-js/${version_1.version}` };
//# sourceMappingURL=constants.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAA,uCAAmC;AACtB,QAAA,eAAe,GAAG,EAAE,eAAe,EAAE,gBAAgB,iBAAO,EAAE,EAAE,CAAA"}

View File

@@ -0,0 +1,19 @@
import PostgrestClient from './PostgrestClient';
import PostgrestQueryBuilder from './PostgrestQueryBuilder';
import PostgrestFilterBuilder from './PostgrestFilterBuilder';
import PostgrestTransformBuilder from './PostgrestTransformBuilder';
import PostgrestBuilder from './PostgrestBuilder';
import PostgrestError from './PostgrestError';
export { PostgrestClient, PostgrestQueryBuilder, PostgrestFilterBuilder, PostgrestTransformBuilder, PostgrestBuilder, PostgrestError, };
declare const _default: {
PostgrestClient: typeof PostgrestClient;
PostgrestQueryBuilder: typeof PostgrestQueryBuilder;
PostgrestFilterBuilder: typeof PostgrestFilterBuilder;
PostgrestTransformBuilder: typeof PostgrestTransformBuilder;
PostgrestBuilder: typeof PostgrestBuilder;
PostgrestError: typeof PostgrestError;
};
export default _default;
export type { PostgrestResponse, PostgrestResponseFailure, PostgrestResponseSuccess, PostgrestSingleResponse, PostgrestMaybeSingleResponse, } from './types';
export type { GetResult as UnstableGetResult } from './select-query-parser/result';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,eAAe,MAAM,mBAAmB,CAAA;AAC/C,OAAO,qBAAqB,MAAM,yBAAyB,CAAA;AAC3D,OAAO,sBAAsB,MAAM,0BAA0B,CAAA;AAC7D,OAAO,yBAAyB,MAAM,6BAA6B,CAAA;AACnE,OAAO,gBAAgB,MAAM,oBAAoB,CAAA;AACjD,OAAO,cAAc,MAAM,kBAAkB,CAAA;AAE7C,OAAO,EACL,eAAe,EACf,qBAAqB,EACrB,sBAAsB,EACtB,yBAAyB,EACzB,gBAAgB,EAChB,cAAc,GACf,CAAA;;;;;;;;;AACD,wBAOC;AACD,YAAY,EACV,iBAAiB,EACjB,wBAAwB,EACxB,wBAAwB,EACxB,uBAAuB,EACvB,4BAA4B,GAC7B,MAAM,SAAS,CAAA;AAGhB,YAAY,EAAE,SAAS,IAAI,iBAAiB,EAAE,MAAM,8BAA8B,CAAA"}

28
node_modules/@supabase/postgrest-js/dist/cjs/index.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PostgrestError = exports.PostgrestBuilder = exports.PostgrestTransformBuilder = exports.PostgrestFilterBuilder = exports.PostgrestQueryBuilder = exports.PostgrestClient = void 0;
// Always update wrapper.mjs when updating this file.
const PostgrestClient_1 = __importDefault(require("./PostgrestClient"));
exports.PostgrestClient = PostgrestClient_1.default;
const PostgrestQueryBuilder_1 = __importDefault(require("./PostgrestQueryBuilder"));
exports.PostgrestQueryBuilder = PostgrestQueryBuilder_1.default;
const PostgrestFilterBuilder_1 = __importDefault(require("./PostgrestFilterBuilder"));
exports.PostgrestFilterBuilder = PostgrestFilterBuilder_1.default;
const PostgrestTransformBuilder_1 = __importDefault(require("./PostgrestTransformBuilder"));
exports.PostgrestTransformBuilder = PostgrestTransformBuilder_1.default;
const PostgrestBuilder_1 = __importDefault(require("./PostgrestBuilder"));
exports.PostgrestBuilder = PostgrestBuilder_1.default;
const PostgrestError_1 = __importDefault(require("./PostgrestError"));
exports.PostgrestError = PostgrestError_1.default;
exports.default = {
PostgrestClient: PostgrestClient_1.default,
PostgrestQueryBuilder: PostgrestQueryBuilder_1.default,
PostgrestFilterBuilder: PostgrestFilterBuilder_1.default,
PostgrestTransformBuilder: PostgrestTransformBuilder_1.default,
PostgrestBuilder: PostgrestBuilder_1.default,
PostgrestError: PostgrestError_1.default,
};
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,qDAAqD;AACrD,wEAA+C;AAQ7C,0BARK,yBAAe,CAQL;AAPjB,oFAA2D;AAQzD,gCARK,+BAAqB,CAQL;AAPvB,sFAA6D;AAQ3D,iCARK,gCAAsB,CAQL;AAPxB,4FAAmE;AAQjE,oCARK,mCAAyB,CAQL;AAP3B,0EAAiD;AAQ/C,2BARK,0BAAgB,CAQL;AAPlB,sEAA6C;AAQ3C,yBARK,wBAAc,CAQL;AAEhB,kBAAe;IACb,eAAe,EAAf,yBAAe;IACf,qBAAqB,EAArB,+BAAqB;IACrB,sBAAsB,EAAtB,gCAAsB;IACtB,yBAAyB,EAAzB,mCAAyB;IACzB,gBAAgB,EAAhB,0BAAgB;IAChB,cAAc,EAAd,wBAAc;CACf,CAAA"}

View File

@@ -0,0 +1,261 @@
import { SimplifyDeep } from '../types';
import { JsonPathToAccessor } from './utils';
/**
* Parses a query.
 * A query is a sequence of nodes separated by `,`; parsing ensures that there
 * is no remaining input after all nodes have been parsed.
*
* Returns an array of parsed nodes, or an error.
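 *
 * @example
 * // Rough sketch added for illustration (not part of the original file);
 * // the table and column names are hypothetical.
 * // type Parsed = ParseQuery<'id, author:users(name)'>
 * // // -> [{ type: 'field'; name: 'id' },
 * // //     { type: 'field'; name: 'users'; alias: 'author';
 * // //       children: [{ type: 'field'; name: 'name' }] }]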
*/
export declare type ParseQuery<Query extends string> = string extends Query ? GenericStringError : ParseNodes<EatWhitespace<Query>> extends [infer Nodes, `${infer Remainder}`] ? Nodes extends Ast.Node[] ? EatWhitespace<Remainder> extends '' ? SimplifyDeep<Nodes> : ParserError<`Unexpected input: ${Remainder}`> : ParserError<'Invalid nodes array structure'> : ParseNodes<EatWhitespace<Query>>;
/**
* Notes: all `Parse*` types assume that their input strings have their whitespace
* removed. They return tuples of ["Return Value", "Remainder of text"] or
* a `ParserError`.
*/
/**
* Parses a sequence of nodes, separated by `,`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"] or an error.
*/
declare type ParseNodes<Input extends string> = string extends Input ? GenericStringError : ParseNodesHelper<Input, []>;
declare type ParseNodesHelper<Input extends string, Nodes extends Ast.Node[]> = ParseNode<Input> extends [
infer Node,
`${infer Remainder}`
] ? Node extends Ast.Node ? EatWhitespace<Remainder> extends `,${infer Remainder}` ? ParseNodesHelper<EatWhitespace<Remainder>, [...Nodes, Node]> : [[...Nodes, Node], EatWhitespace<Remainder>] : ParserError<'Invalid node type in nodes helper'> : ParseNode<Input>;
/**
* Parses a node.
* A node is one of the following:
* - `*`
* - a field, as defined above
* - a renamed field, `renamed_field:field`
* - a spread field, `...field`
*/
declare type ParseNode<Input extends string> = Input extends '' ? ParserError<'Empty string'> : Input extends `*${infer Remainder}` ? [Ast.StarNode, EatWhitespace<Remainder>] : Input extends `...${infer Remainder}` ? ParseField<EatWhitespace<Remainder>> extends [infer TargetField, `${infer Remainder}`] ? TargetField extends Ast.FieldNode ? [{
type: 'spread';
target: TargetField;
}, EatWhitespace<Remainder>] : ParserError<'Invalid target field type in spread'> : ParserError<`Unable to parse spread resource at \`${Input}\``> : ParseIdentifier<Input> extends [infer NameOrAlias, `${infer Remainder}`] ? EatWhitespace<Remainder> extends `::${infer _}` ? ParseField<Input> : EatWhitespace<Remainder> extends `:${infer Remainder}` ? ParseField<EatWhitespace<Remainder>> extends [infer Field, `${infer Remainder}`] ? Field extends Ast.FieldNode ? [Omit<Field, 'alias'> & {
alias: NameOrAlias;
}, EatWhitespace<Remainder>] : ParserError<'Invalid field type in alias parsing'> : ParserError<`Unable to parse renamed field at \`${Input}\``> : ParseField<Input> : ParserError<`Expected identifier at \`${Input}\``>;
/**
* Parses a field without preceding alias.
* A field is one of the following:
* - a top-level `count` field: https://docs.postgrest.org/en/v12/references/api/aggregate_functions.html#the-case-of-count
* - a field with an embedded resource
* - `field(nodes)`
* - `field!hint(nodes)`
* - `field!inner(nodes)`
* - `field!left(nodes)`
* - `field!hint!inner(nodes)`
* - `field!hint!left(nodes)`
* - a field without an embedded resource (see {@link ParseNonEmbeddedResourceField})
*/
declare type ParseField<Input extends string> = Input extends '' ? ParserError<'Empty string'> : ParseIdentifier<Input> extends [infer Name, `${infer Remainder}`] ? Name extends 'count' ? ParseCountField<Input> : Remainder extends `!inner${infer Remainder}` ? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [
{
type: 'field';
name: Name;
innerJoin: true;
children: Children;
},
Remainder
] : ParserError<'Invalid children array in inner join'> : CreateParserErrorIfRequired<ParseEmbeddedResource<EatWhitespace<Remainder>>, `Expected embedded resource after "!inner" at \`${Remainder}\``> : EatWhitespace<Remainder> extends `!left${infer Remainder}` ? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [
{
type: 'field';
name: Name;
children: Children;
},
EatWhitespace<Remainder>
] : ParserError<'Invalid children array in left join'> : CreateParserErrorIfRequired<ParseEmbeddedResource<EatWhitespace<Remainder>>, `Expected embedded resource after "!left" at \`${EatWhitespace<Remainder>}\``> : EatWhitespace<Remainder> extends `!${infer Remainder}` ? ParseIdentifier<EatWhitespace<Remainder>> extends [infer Hint, `${infer Remainder}`] ? EatWhitespace<Remainder> extends `!inner${infer Remainder}` ? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
] ? Children extends Ast.Node[] ? [
{
type: 'field';
name: Name;
hint: Hint;
innerJoin: true;
children: Children;
},
EatWhitespace<Remainder>
] : ParserError<'Invalid children array in hint inner join'> : ParseEmbeddedResource<EatWhitespace<Remainder>> : ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
] ? Children extends Ast.Node[] ? [
{
type: 'field';
name: Name;
hint: Hint;
children: Children;
},
EatWhitespace<Remainder>
] : ParserError<'Invalid children array in hint'> : ParseEmbeddedResource<EatWhitespace<Remainder>> : ParserError<`Expected identifier after "!" at \`${EatWhitespace<Remainder>}\``> : EatWhitespace<Remainder> extends `(${infer _}` ? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [
{
type: 'field';
name: Name;
children: Children;
},
EatWhitespace<Remainder>
] : ParserError<'Invalid children array in field'> : ParseEmbeddedResource<EatWhitespace<Remainder>> : ParseNonEmbeddedResourceField<Input> : ParserError<`Expected identifier at \`${Input}\``>;
declare type ParseCountField<Input extends string> = ParseIdentifier<Input> extends [
'count',
`${infer Remainder}`
] ? (EatWhitespace<Remainder> extends `()${infer Remainder_}` ? EatWhitespace<Remainder_> : EatWhitespace<Remainder>) extends `${infer Remainder}` ? Remainder extends `::${infer _}` ? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`] ? [
{
type: 'field';
name: 'count';
aggregateFunction: 'count';
castType: CastType;
},
Remainder
] : ParseFieldTypeCast<Remainder> : [{
type: 'field';
name: 'count';
aggregateFunction: 'count';
}, Remainder] : never : ParserError<`Expected "count" at \`${Input}\``>;
/**
* Parses an embedded resource, which is an opening `(`, followed by a sequence of
* 0 or more nodes separated by `,`, then a closing `)`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"], an error,
* or the original string input indicating that no opening `(` was found.
*/
declare type ParseEmbeddedResource<Input extends string> = Input extends `(${infer Remainder}` ? EatWhitespace<Remainder> extends `)${infer Remainder}` ? [[], EatWhitespace<Remainder>] : ParseNodes<EatWhitespace<Remainder>> extends [infer Nodes, `${infer Remainder}`] ? Nodes extends Ast.Node[] ? EatWhitespace<Remainder> extends `)${infer Remainder}` ? [Nodes, EatWhitespace<Remainder>] : ParserError<`Expected ")" at \`${EatWhitespace<Remainder>}\``> : ParserError<'Invalid nodes array in embedded resource'> : ParseNodes<EatWhitespace<Remainder>> : ParserError<`Expected "(" at \`${Input}\``>;
/**
* Parses a field excluding embedded resources, without preceding field renaming.
* This is one of the following:
* - `field`
* - `field.aggregate()`
* - `field.aggregate()::type`
* - `field::type`
* - `field::type.aggregate()`
* - `field::type.aggregate()::type`
* - `field->json...`
* - `field->json.aggregate()`
* - `field->json.aggregate()::type`
* - `field->json::type`
* - `field->json::type.aggregate()`
* - `field->json::type.aggregate()::type`
*/
declare type ParseNonEmbeddedResourceField<Input extends string> = ParseIdentifier<Input> extends [
infer Name,
`${infer Remainder}`
] ? (Remainder extends `->${infer PathAndRest}` ? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
] ? [
{
type: 'field';
name: Name;
alias: PropertyName;
castType: PropertyType;
jsonPath: JsonPathToAccessor<PathAndRest extends `${infer Path},${string}` ? Path : PathAndRest>;
},
Remainder
] : ParseJsonAccessor<Remainder> : [{
type: 'field';
name: Name;
}, Remainder]) extends infer Parsed ? Parsed extends [infer Field, `${infer Remainder}`] ? (Remainder extends `::${infer _}` ? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`] ? [Omit<Field, 'castType'> & {
castType: CastType;
}, Remainder] : ParseFieldTypeCast<Remainder> : [Field, Remainder]) extends infer Parsed ? Parsed extends [infer Field, `${infer Remainder}`] ? Remainder extends `.${infer _}` ? ParseFieldAggregation<Remainder> extends [
infer AggregateFunction,
`${infer Remainder}`
] ? Remainder extends `::${infer _}` ? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`] ? [
Omit<Field, 'castType'> & {
aggregateFunction: AggregateFunction;
castType: CastType;
},
Remainder
] : ParseFieldTypeCast<Remainder> : [Field & {
aggregateFunction: AggregateFunction;
}, Remainder] : ParseFieldAggregation<Remainder> : [Field, Remainder] : Parsed : never : Parsed : never : ParserError<`Expected identifier at \`${Input}\``>;
/**
* Parses a JSON property accessor of the shape `->a->b->c`. The last accessor in
* the series may convert to text by using the ->> operator instead of ->.
*
* Returns a tuple of ["Last property name", "Last property type", "Remainder of text"]
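 *
 * @example
 * // Rough sketch added for illustration (not part of the original file):
 * // ParseJsonAccessor<'->address->>city::text'> resolves to
 * // ['city', 'text', '::text'] - the `->>` form yields the 'text' type.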
*/
declare type ParseJsonAccessor<Input extends string> = Input extends `->${infer Remainder}` ? Remainder extends `>${infer Remainder}` ? ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`] ? [Name, 'text', EatWhitespace<Remainder>] : ParserError<'Expected property name after `->>`'> : ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`] ? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
] ? [PropertyName, PropertyType, EatWhitespace<Remainder>] : [Name, 'json', EatWhitespace<Remainder>] : ParserError<'Expected property name after `->`'> : ParserError<'Expected ->'>;
/**
* Parses a field typecast (`::type`), returning a tuple of ["Type", "Remainder of text"].
*/
declare type ParseFieldTypeCast<Input extends string> = EatWhitespace<Input> extends `::${infer Remainder}` ? ParseIdentifier<EatWhitespace<Remainder>> extends [`${infer CastType}`, `${infer Remainder}`] ? [CastType, EatWhitespace<Remainder>] : ParserError<`Invalid type for \`::\` operator at \`${Remainder}\``> : ParserError<'Expected ::'>;
/**
* Parses a field aggregation (`.max()`), returning a tuple of ["Aggregate function", "Remainder of text"]
*/
declare type ParseFieldAggregation<Input extends string> = EatWhitespace<Input> extends `.${infer Remainder}` ? ParseIdentifier<EatWhitespace<Remainder>> extends [
`${infer FunctionName}`,
`${infer Remainder}`
] ? FunctionName extends Token.AggregateFunction ? EatWhitespace<Remainder> extends `()${infer Remainder}` ? [FunctionName, EatWhitespace<Remainder>] : ParserError<`Expected \`()\` after \`.\` operator \`${FunctionName}\``> : ParserError<`Invalid type for \`.\` operator \`${FunctionName}\``> : ParserError<`Invalid type for \`.\` operator at \`${Remainder}\``> : ParserError<'Expected .'>;
/**
* Parses a (possibly double-quoted) identifier.
* Identifiers are sequences of 1 or more letters.
*/
declare type ParseIdentifier<Input extends string> = ParseLetters<Input> extends [
infer Name,
`${infer Remainder}`
] ? [Name, EatWhitespace<Remainder>] : ParseQuotedLetters<Input> extends [infer Name, `${infer Remainder}`] ? [Name, EatWhitespace<Remainder>] : ParserError<`No (possibly double-quoted) identifier at \`${Input}\``>;
/**
 * Parse a consecutive sequence of 1 or more letters, where letters are `[0-9a-zA-Z_]`.
*/
declare type ParseLetters<Input extends string> = string extends Input ? GenericStringError : ParseLettersHelper<Input, ''> extends [`${infer Letters}`, `${infer Remainder}`] ? Letters extends '' ? ParserError<`Expected letter at \`${Input}\``> : [Letters, Remainder] : ParseLettersHelper<Input, ''>;
declare type ParseLettersHelper<Input extends string, Acc extends string> = string extends Input ? GenericStringError : Input extends `${infer L}${infer Remainder}` ? L extends Token.Letter ? ParseLettersHelper<Remainder, `${Acc}${L}`> : [Acc, Input] : [Acc, ''];
/**
* Parse a consecutive sequence of 1 or more double-quoted letters,
* where letters are `[^"]`.
*/
declare type ParseQuotedLetters<Input extends string> = string extends Input ? GenericStringError : Input extends `"${infer Remainder}` ? ParseQuotedLettersHelper<Remainder, ''> extends [`${infer Letters}`, `${infer Remainder}`] ? Letters extends '' ? ParserError<`Expected string at \`${Remainder}\``> : [Letters, Remainder] : ParseQuotedLettersHelper<Remainder, ''> : ParserError<`Not a double-quoted string at \`${Input}\``>;
declare type ParseQuotedLettersHelper<Input extends string, Acc extends string> = string extends Input ? GenericStringError : Input extends `${infer L}${infer Remainder}` ? L extends '"' ? [Acc, Remainder] : ParseQuotedLettersHelper<Remainder, `${Acc}${L}`> : ParserError<`Missing closing double-quote in \`"${Acc}${Input}\``>;
/**
* Trims whitespace from the left of the input.
*/
declare type EatWhitespace<Input extends string> = string extends Input ? GenericStringError : Input extends `${Token.Whitespace}${infer Remainder}` ? EatWhitespace<Remainder> : Input;
/**
* Creates a new {@link ParserError} if the given input is not already a parser error.
*/
declare type CreateParserErrorIfRequired<Input, Message extends string> = Input extends ParserError<string> ? Input : ParserError<Message>;
/**
* Parser errors.
*/
export declare type ParserError<Message extends string> = {
error: true;
} & Message;
declare type GenericStringError = ParserError<'Received a generic string'>;
export declare namespace Ast {
type Node = FieldNode | StarNode | SpreadNode;
type FieldNode = {
type: 'field';
name: string;
alias?: string;
hint?: string;
innerJoin?: true;
castType?: string;
jsonPath?: string;
aggregateFunction?: Token.AggregateFunction;
children?: Node[];
};
type StarNode = {
type: 'star';
};
type SpreadNode = {
type: 'spread';
target: FieldNode & {
children: Node[];
};
};
}
declare namespace Token {
export type Whitespace = ' ' | '\n' | '\t';
type LowerAlphabet = 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z';
type Alphabet = LowerAlphabet | Uppercase<LowerAlphabet>;
type Digit = '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0';
export type Letter = Alphabet | Digit | '_';
export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max';
export {};
}
export {};
//# sourceMappingURL=parser.d.ts.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,5 @@
"use strict";
// Credits to @bnjmnt4n (https://www.npmjs.com/package/postgrest-query)
// See https://github.com/PostgREST/postgrest/blob/2f91853cb1de18944a4556df09e52450b881cfb3/src/PostgREST/ApiRequest/QueryParams.hs#L282-L284
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=parser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"parser.js","sourceRoot":"","sources":["../../../src/select-query-parser/parser.ts"],"names":[],"mappings":";AAAA,uEAAuE;AACvE,6IAA6I"}

View File

@@ -0,0 +1,149 @@
import { GenericTable } from '../types';
import { ContainsNull, GenericRelationship, PostgreSQLTypes } from './types';
import { Ast, ParseQuery } from './parser';
import { AggregateFunctions, ExtractFirstProperty, GenericSchema, IsNonEmptyArray, Prettify, TablesAndViews, TypeScriptTypes } from './types';
import { CheckDuplicateEmbededReference, GetFieldNodeResultName, IsAny, IsRelationNullable, IsStringUnion, JsonPathToType, ResolveRelationship, SelectQueryError } from './utils';
/**
* Main entry point for constructing the result type of a PostgREST query.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Query - The select query string literal to parse.
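 *
 * @example
 * // Rough sketch added for illustration (not part of the original file);
 * // `Database` and its `users` table are hypothetical generated types.
 * // type UserPreview = GetResult<
 * //   Database['public'],
 * //   Database['public']['Tables']['users']['Row'],
 * //   'users',
 * //   Database['public']['Tables']['users']['Relationships'],
 * //   'id, name'
 * // >
 * // // -> { id: ...; name: ... } picked from the `users` Row type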
*/
export declare type GetResult<Schema extends GenericSchema, Row extends Record<string, unknown>, RelationName, Relationships, Query extends string> = IsAny<Schema> extends true ? ParseQuery<Query> extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RelationName extends string ? ProcessNodesWithoutSchema<ParsedQuery> : any : ParsedQuery : any : Relationships extends null ? ParseQuery<Query> extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RPCCallNodes<ParsedQuery, RelationName extends string ? RelationName : 'rpc_call', Row> : ParsedQuery : Row : ParseQuery<Query> extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RelationName extends string ? Relationships extends GenericRelationship[] ? ProcessNodes<Schema, Row, RelationName, Relationships, ParsedQuery> : SelectQueryError<'Invalid Relationships cannot infer result type'> : SelectQueryError<'Invalid RelationName cannot infer result type'> : ParsedQuery : never;
declare type ProcessSimpleFieldWithoutSchema<Field extends Ast.FieldNode> = Field['aggregateFunction'] extends AggregateFunctions ? {
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes ? TypeScriptTypes<Field['castType']> : number;
} : {
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes ? TypeScriptTypes<Field['castType']> : any;
};
declare type ProcessFieldNodeWithoutSchema<Node extends Ast.FieldNode> = IsNonEmptyArray<Node['children']> extends true ? {
[K in GetFieldNodeResultName<Node>]: Node['children'] extends Ast.Node[] ? ProcessNodesWithoutSchema<Node['children']>[] : ProcessSimpleFieldWithoutSchema<Node>;
} : ProcessSimpleFieldWithoutSchema<Node>;
/**
* Processes a single Node without schema and returns the resulting TypeScript type.
*/
declare type ProcessNodeWithoutSchema<Node extends Ast.Node> = Node extends Ast.StarNode ? any : Node extends Ast.SpreadNode ? Node['target']['children'] extends Ast.StarNode[] ? any : Node['target']['children'] extends Ast.FieldNode[] ? {
[P in Node['target']['children'][number] as GetFieldNodeResultName<P>]: P['castType'] extends PostgreSQLTypes ? TypeScriptTypes<P['castType']> : any;
} : any : Node extends Ast.FieldNode ? ProcessFieldNodeWithoutSchema<Node> : any;
/**
* Processes nodes when Schema is any, providing basic type inference
*/
declare type ProcessNodesWithoutSchema<Nodes extends Ast.Node[], Acc extends Record<string, unknown> = {}> = Nodes extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessNodeWithoutSchema<FirstNode> extends infer FieldResult ? FieldResult extends Record<string, unknown> ? ProcessNodesWithoutSchema<RestNodes, Acc & FieldResult> : FieldResult : any : any : any : Prettify<Acc>;
/**
* Processes a single Node from a select chained after a rpc call
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current rpc function
* @param NodeType - The Node to process.
*/
export declare type ProcessRPCNode<Row extends Record<string, unknown>, RelationName extends string, NodeType extends Ast.Node> = NodeType['type'] extends Ast.StarNode['type'] ? Row : NodeType['type'] extends Ast.FieldNode['type'] ? ProcessSimpleField<Row, RelationName, Extract<NodeType, Ast.FieldNode>> : SelectQueryError<'RPC Unsupported node type.'>;
/**
* Process select call that can be chained after an rpc call
*/
export declare type RPCCallNodes<Nodes extends Ast.Node[], RelationName extends string, Row extends Record<string, unknown>, Acc extends Record<string, unknown> = {}> = Nodes extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessRPCNode<Row, RelationName, FirstNode> extends infer FieldResult ? FieldResult extends Record<string, unknown> ? RPCCallNodes<RestNodes, RelationName, Row, Acc & FieldResult> : FieldResult extends SelectQueryError<infer E> ? SelectQueryError<E> : SelectQueryError<'Could not retrieve a valid record or error value'> : SelectQueryError<'Processing node failed.'> : SelectQueryError<'Invalid rest nodes array in RPC call'> : SelectQueryError<'Invalid first node in RPC call'> : Prettify<Acc>;
/**
* Recursively processes an array of Nodes and accumulates the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Nodes - An array of AST nodes to process.
* @param Acc - Accumulator for the constructed type.
*/
export declare type ProcessNodes<Schema extends GenericSchema, Row extends Record<string, unknown>, RelationName extends string, Relationships extends GenericRelationship[], Nodes extends Ast.Node[], Acc extends Record<string, unknown> = {}> = CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes> extends false ? Nodes extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessNode<Schema, Row, RelationName, Relationships, FirstNode> extends infer FieldResult ? FieldResult extends Record<string, unknown> ? ProcessNodes<Schema, Row, RelationName, Relationships, RestNodes, Acc & FieldResult> : FieldResult extends SelectQueryError<infer E> ? SelectQueryError<E> : SelectQueryError<'Could not retrieve a valid record or error value'> : SelectQueryError<'Processing node failed.'> : SelectQueryError<'Invalid rest nodes array type in ProcessNodes'> : SelectQueryError<'Invalid first node type in ProcessNodes'> : Prettify<Acc> : Prettify<CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes>>;
/**
* Processes a single Node and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param NodeType - The Node to process.
*/
export declare type ProcessNode<Schema extends GenericSchema, Row extends Record<string, unknown>, RelationName extends string, Relationships extends GenericRelationship[], NodeType extends Ast.Node> = NodeType['type'] extends Ast.StarNode['type'] ? Row : NodeType['type'] extends Ast.SpreadNode['type'] ? ProcessSpreadNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.SpreadNode>> : NodeType['type'] extends Ast.FieldNode['type'] ? ProcessFieldNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.FieldNode>> : SelectQueryError<'Unsupported node type.'>;
/**
* Processes a FieldNode and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
declare type ProcessFieldNode<Schema extends GenericSchema, Row extends Record<string, unknown>, RelationName extends string, Relationships extends GenericRelationship[], Field extends Ast.FieldNode> = Field['children'] extends [] ? {} : IsNonEmptyArray<Field['children']> extends true ? ProcessEmbeddedResource<Schema, Relationships, Field, RelationName> : ProcessSimpleField<Row, RelationName, Field>;
declare type ResolveJsonPathType<Value, Path extends string | undefined, CastType extends PostgreSQLTypes> = Path extends string ? JsonPathToType<Value, Path> extends never ? TypeScriptTypes<CastType> : JsonPathToType<Value, Path> extends infer PathResult ? PathResult extends string ? PathResult : IsStringUnion<PathResult> extends true ? PathResult : CastType extends 'json' ? PathResult : TypeScriptTypes<CastType> : TypeScriptTypes<CastType> : TypeScriptTypes<CastType>;
/**
* Processes a simple field (without embedded resources).
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Field - The FieldNode to process.
*/
declare type ProcessSimpleField<Row extends Record<string, unknown>, RelationName extends string, Field extends Ast.FieldNode> = Field['name'] extends keyof Row | 'count' ? Field['aggregateFunction'] extends AggregateFunctions ? {
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes ? TypeScriptTypes<Field['castType']> : number;
} : {
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes ? ResolveJsonPathType<Row[Field['name']], Field['jsonPath'], Field['castType']> : Row[Field['name']];
} : SelectQueryError<`column '${Field['name']}' does not exist on '${RelationName}'.`>;
/**
* Processes an embedded resource (relation).
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
export declare type ProcessEmbeddedResource<Schema extends GenericSchema, Relationships extends GenericRelationship[], Field extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews<Schema> & string> = ResolveRelationship<Schema, Relationships, Field, CurrentTableOrView> extends infer Resolved ? Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>;
relation: GenericRelationship & {
match: 'refrel' | 'col' | 'fkname';
};
direction: string;
} ? ProcessEmbeddedResourceResult<Schema, Resolved, Field, CurrentTableOrView> : {
[K in GetFieldNodeResultName<Field>]: Resolved;
} : {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to resolve relationship.'> & string;
};
/**
* Helper type to process the result of an embedded resource.
*/
declare type ProcessEmbeddedResourceResult<Schema extends GenericSchema, Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>;
relation: GenericRelationship & {
match: 'refrel' | 'col' | 'fkname';
};
direction: string;
}, Field extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews<Schema>> = ProcessNodes<Schema, Resolved['referencedTable']['Row'], Field['name'], Resolved['referencedTable']['Relationships'], Field['children'] extends undefined ? [] : Exclude<Field['children'], undefined> extends Ast.Node[] ? Exclude<Field['children'], undefined> : []> extends infer ProcessedChildren ? {
[K in GetFieldNodeResultName<Field>]: Resolved['direction'] extends 'forward' ? Field extends {
innerJoin: true;
} ? Resolved['relation']['isOneToOne'] extends true ? ProcessedChildren : ProcessedChildren[] : Resolved['relation']['isOneToOne'] extends true ? ProcessedChildren | null : ProcessedChildren[] : Resolved['relation']['referencedRelation'] extends CurrentTableOrView ? Resolved['relation']['match'] extends 'col' ? IsRelationNullable<TablesAndViews<Schema>[CurrentTableOrView], Resolved['relation']> extends true ? ProcessedChildren | null : ProcessedChildren : ProcessedChildren[] : IsRelationNullable<TablesAndViews<Schema>[CurrentTableOrView], Resolved['relation']> extends true ? Field extends {
innerJoin: true;
} ? ProcessedChildren : ProcessedChildren | null : ProcessedChildren;
} : {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to process embedded resource nodes.'> & string;
};
/**
* Processes a SpreadNode by processing its target node.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Spread - The SpreadNode to process.
*/
declare type ProcessSpreadNode<Schema extends GenericSchema, Row extends Record<string, unknown>, RelationName extends string, Relationships extends GenericRelationship[], Spread extends Ast.SpreadNode> = ProcessNode<Schema, Row, RelationName, Relationships, Spread['target']> extends infer Result ? Result extends SelectQueryError<infer E> ? SelectQueryError<E> : ExtractFirstProperty<Result> extends unknown[] ? {
[K in Spread['target']['name']]: SelectQueryError<`"${RelationName}" and "${Spread['target']['name']}" do not form a many-to-one or one-to-one relationship spread not possible`>;
} : ProcessSpreadNodeResult<Result> : never;
/**
* Helper type to process the result of a spread node.
*/
declare type ProcessSpreadNodeResult<Result> = Result extends Record<string, SelectQueryError<string> | null> ? Result : ExtractFirstProperty<Result> extends infer SpreadedObject ? ContainsNull<SpreadedObject> extends true ? Exclude<{
[K in keyof SpreadedObject]: SpreadedObject[K] | null;
}, null> : Exclude<{
[K in keyof SpreadedObject]: SpreadedObject[K];
}, null> : SelectQueryError<'An error occurred spreading the object'>;
export {};
//# sourceMappingURL=result.d.ts.map
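
The declarations above are what drive the result-type inference behind `.select()`. A minimal sketch of how they surface to callers, assuming a hypothetical `Database` type produced by the Supabase type generator with a `posts` table and a to-one relationship to `users` (names are illustrative only):

```ts
import { PostgrestClient } from '@supabase/postgrest-js'
import type { Database } from './database-types' // hypothetical generated types

const postgrest = new PostgrestClient<Database>('http://localhost:3000')

async function selectInferenceExample() {
  // ProcessFieldNode / ProcessEmbeddedResource infer the row shape from the select string:
  const { data } = await postgrest.from('posts').select('id, title, author:users(name)')
  // `data` is roughly inferred as:
  //   { id: number; title: string; author: { name: string } | null }[] | null
  // Aggregates and casts are typed as well: 'amount.sum()' yields number, 'id::text' yields string.
  return data
}
```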

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=result.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"result.js","sourceRoot":"","sources":["../../../src/select-query-parser/result.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,31 @@
import type { GenericRelationship, GenericSchema, GenericTable, Prettify } from '../types';
export type { GenericRelationship, GenericSchema, GenericTable, Prettify };
export declare type AggregateWithoutColumnFunctions = 'count';
export declare type AggregateWithColumnFunctions = 'sum' | 'avg' | 'min' | 'max' | AggregateWithoutColumnFunctions;
export declare type AggregateFunctions = AggregateWithColumnFunctions;
export declare type Json = string | number | boolean | null | {
[key: string]: Json | undefined;
} | Json[];
declare type PostgresSQLNumberTypes = 'int2' | 'int4' | 'int8' | 'float4' | 'float8' | 'numeric';
declare type PostgresSQLStringTypes = 'bytea' | 'bpchar' | 'varchar' | 'date' | 'text' | 'citext' | 'time' | 'timetz' | 'timestamp' | 'timestamptz' | 'uuid' | 'vector';
declare type SingleValuePostgreSQLTypes = PostgresSQLNumberTypes | PostgresSQLStringTypes | 'bool' | 'json' | 'jsonb' | 'void' | 'record' | string;
declare type ArrayPostgreSQLTypes = `_${SingleValuePostgreSQLTypes}`;
declare type TypeScriptSingleValueTypes<T extends SingleValuePostgreSQLTypes> = T extends 'bool' ? boolean : T extends PostgresSQLNumberTypes ? number : T extends PostgresSQLStringTypes ? string : T extends 'json' | 'jsonb' ? Json : T extends 'void' ? undefined : T extends 'record' ? Record<string, unknown> : unknown;
declare type StripUnderscore<T extends string> = T extends `_${infer U}` ? U : T;
export declare type PostgreSQLTypes = SingleValuePostgreSQLTypes | ArrayPostgreSQLTypes;
export declare type TypeScriptTypes<T extends PostgreSQLTypes> = T extends ArrayPostgreSQLTypes ? TypeScriptSingleValueTypes<StripUnderscore<Extract<T, SingleValuePostgreSQLTypes>>>[] : TypeScriptSingleValueTypes<T>;
export declare type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
export declare type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R ? R : never;
export declare type Push<T extends any[], V> = [...T, V];
export declare type UnionToTuple<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = N extends true ? [] : Push<UnionToTuple<Exclude<T, L>>, L>;
export declare type UnionToArray<T> = UnionToTuple<T>;
export declare type ExtractFirstProperty<T> = T extends {
[K in keyof T]: infer U;
} ? U : never;
export declare type ContainsNull<T> = null extends T ? true : false;
export declare type IsNonEmptyArray<T> = Exclude<T, undefined> extends readonly [unknown, ...unknown[]] ? true : false;
export declare type TablesAndViews<Schema extends GenericSchema> = Schema['Tables'] & Exclude<Schema['Views'], ''>;
export declare type GetTableRelationships<Schema extends GenericSchema, Tname extends string> = TablesAndViews<Schema>[Tname] extends {
Relationships: infer R;
} ? R : false;
//# sourceMappingURL=types.d.ts.map
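
The `PostgreSQLTypes`/`TypeScriptTypes` mapping above is what gives `::` casts in a select string their TypeScript type. A small sketch, reusing the hypothetical `postgrest` client from the earlier example and an assumed `orders` table:

```ts
async function castExample() {
  // `postgrest` is the typed client from the first sketch above.
  // 'total::text' maps a numeric column to string; array types like '_int4' map to number[].
  const { data } = await postgrest.from('orders').select('id, total::text, created_at::date')
  // roughly: { id: number; total: string; created_at: string }[] | null
  return data
}
```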

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/select-query-parser/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,aAAa,EAAE,YAAY,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAE1F,YAAY,EAAE,mBAAmB,EAAE,aAAa,EAAE,YAAY,EAAE,QAAQ,EAAE,CAAA;AAE1E,oBAAY,+BAA+B,GAAG,OAAO,CAAA;AAErD,oBAAY,4BAA4B,GACpC,KAAK,GACL,KAAK,GACL,KAAK,GACL,KAAK,GACL,+BAA+B,CAAA;AAEnC,oBAAY,kBAAkB,GAAG,4BAA4B,CAAA;AAE7D,oBAAY,IAAI,GACZ,MAAM,GACN,MAAM,GACN,OAAO,GACP,IAAI,GACJ;IACE,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAA;CAChC,GACD,IAAI,EAAE,CAAA;AAEV,aAAK,sBAAsB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,QAAQ,GAAG,QAAQ,GAAG,SAAS,CAAA;AAExF,aAAK,sBAAsB,GACvB,OAAO,GACP,QAAQ,GACR,SAAS,GACT,MAAM,GACN,MAAM,GACN,QAAQ,GACR,MAAM,GACN,QAAQ,GACR,WAAW,GACX,aAAa,GACb,MAAM,GACN,QAAQ,CAAA;AAEZ,aAAK,0BAA0B,GAC3B,sBAAsB,GACtB,sBAAsB,GACtB,MAAM,GACN,MAAM,GACN,OAAO,GACP,MAAM,GACN,QAAQ,GACR,MAAM,CAAA;AAEV,aAAK,oBAAoB,GAAG,IAAI,0BAA0B,EAAE,CAAA;AAE5D,aAAK,0BAA0B,CAAC,CAAC,SAAS,0BAA0B,IAAI,CAAC,SAAS,MAAM,GACpF,OAAO,GACP,CAAC,SAAS,sBAAsB,GAChC,MAAM,GACN,CAAC,SAAS,sBAAsB,GAChC,MAAM,GACN,CAAC,SAAS,MAAM,GAAG,OAAO,GAC1B,IAAI,GACJ,CAAC,SAAS,MAAM,GAChB,SAAS,GACT,CAAC,SAAS,QAAQ,GAClB,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GACvB,OAAO,CAAA;AAEX,aAAK,eAAe,CAAC,CAAC,SAAS,MAAM,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAA;AAGxE,oBAAY,eAAe,GAAG,0BAA0B,GAAG,oBAAoB,CAAA;AAG/E,oBAAY,eAAe,CAAC,CAAC,SAAS,eAAe,IAAI,CAAC,SAAS,oBAAoB,GACnF,0BAA0B,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC,EAAE,0BAA0B,CAAC,CAAC,CAAC,EAAE,GACrF,0BAA0B,CAAC,CAAC,CAAC,CAAA;AAGjC,oBAAY,mBAAmB,CAAC,CAAC,IAAI,CAAC,CAAC,SAAS,GAAG,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,IAAI,GAAG,KAAK,CAAC,SAAS,CACpF,CAAC,EAAE,MAAM,CAAC,KACP,IAAI,GACL,CAAC,GACD,KAAK,CAAA;AAET,oBAAY,MAAM,CAAC,CAAC,IAAI,mBAAmB,CAAC,CAAC,SAAS,GAAG,GAAG,MAAM,CAAC,GAAG,KAAK,CAAC,SAAS,MAAM,MAAM,CAAC,GAC9F,CAAC,GACD,KAAK,CAAA;AAET,oBAAY,IAAI,CAAC,CAAC,SAAS,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAA;AAGhD,oBAAY,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,SAAS,IAAI,GAC/F,EAAE,GACF,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;AAExC,oBAAY,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,CAAA;AAG7C,oBAAY,oBAAoB,CAAC,CAAC,IAAI,CAAC,SAAS;KAAG,CAAC,IAAI,MAAM,CAAC,GAAG,MAAM,CAAC;CAAE,GAAG,CAAC,GAAG,KAAK,CAAA;AAGvF,oBAAY,YAAY,CAAC,CAAC,IAAI,IAAI,SAAS,CAAC,GAAG,IAAI,GAAG,KAAK,CAAA;AAE3D,oBAAY,eAAe,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,EAAE,SAAS,CAAC,SAAS,SAAS,CAAC,OAAO,EAAE,GAAG,OAAO,EAAE,CAAC,GAC3F,IAAI,GACJ,KAAK,CAAA;AAGT,oBAAY,cAAc,CAAC,MAAM,SAAS,aAAa,IAAI,MAAM,CAAC,QAAQ,CAAC,GACzE,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAA;AAE9B,oBAAY,qBAAqB,CAC/B,MAAM,SAAS,aAAa,EAC5B,KAAK,SAAS,MAAM,IAClB,cAAc,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,SAAS;IAAE,aAAa,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,CAAC,GAAG,KAAK,CAAA"}

View File

@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/select-query-parser/types.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,255 @@
import { Ast } from './parser';
import { AggregateFunctions, ContainsNull, GenericRelationship, GenericSchema, GenericTable, IsNonEmptyArray, TablesAndViews, UnionToArray } from './types';
export declare type IsAny<T> = 0 extends 1 & T ? true : false;
export declare type SelectQueryError<Message extends string> = {
error: true;
} & Message;
export declare type DeduplicateRelationships<T extends readonly unknown[]> = T extends readonly [
infer First,
...infer Rest
] ? First extends Rest[number] ? DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []> : [First, ...DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>] : T;
export declare type GetFieldNodeResultName<Field extends Ast.FieldNode> = Field['alias'] extends string ? Field['alias'] : Field['aggregateFunction'] extends AggregateFunctions ? Field['aggregateFunction'] : Field['name'];
declare type FilterRelationNodes<Nodes extends Ast.Node[]> = UnionToArray<{
[K in keyof Nodes]: Nodes[K] extends Ast.SpreadNode ? Nodes[K]['target'] : Nodes[K] extends Ast.FieldNode ? IsNonEmptyArray<Nodes[K]['children']> extends true ? Nodes[K] : never : never;
}[number]>;
declare type ResolveRelationships<Schema extends GenericSchema, RelationName extends string, Relationships extends GenericRelationship[], Nodes extends Ast.FieldNode[]> = UnionToArray<{
[K in keyof Nodes]: Nodes[K] extends Ast.FieldNode ? ResolveRelationship<Schema, Relationships, Nodes[K], RelationName> extends infer Relation ? Relation extends {
relation: {
referencedRelation: string;
foreignKeyName: string;
match: string;
};
from: string;
} ? {
referencedTable: Relation['relation']['referencedRelation'];
fkName: Relation['relation']['foreignKeyName'];
from: Relation['from'];
match: Relation['relation']['match'];
fieldName: GetFieldNodeResultName<Nodes[K]>;
} : Relation : never : never;
}>[0];
/**
* Checks if a relation is implicitly referenced twice, requiring disambiguation
*/
declare type IsDoubleReference<T, U> = T extends {
referencedTable: infer RT;
fieldName: infer FN;
match: infer M;
} ? M extends 'col' | 'refrel' ? U extends {
referencedTable: RT;
fieldName: FN;
match: M;
} ? true : false : false : false;
/**
* Compares one element with all other elements in the array to find duplicates
*/
declare type CheckDuplicates<Arr extends any[], Current> = Arr extends [infer Head, ...infer Tail] ? IsDoubleReference<Current, Head> extends true ? Head | CheckDuplicates<Tail, Current> : CheckDuplicates<Tail, Current> : never;
/**
* Iterates over the elements of the array to find duplicates
*/
declare type FindDuplicatesWithinDeduplicated<Arr extends any[]> = Arr extends [infer Head, ...infer Tail] ? CheckDuplicates<Tail, Head> | FindDuplicatesWithinDeduplicated<Tail> : never;
declare type FindDuplicates<Arr extends any[]> = FindDuplicatesWithinDeduplicated<DeduplicateRelationships<Arr>>;
export declare type CheckDuplicateEmbededReference<Schema extends GenericSchema, RelationName extends string, Relationships extends GenericRelationship[], Nodes extends Ast.Node[]> = FilterRelationNodes<Nodes> extends infer RelationsNodes ? RelationsNodes extends Ast.FieldNode[] ? ResolveRelationships<Schema, RelationName, Relationships, RelationsNodes> extends infer ResolvedRels ? ResolvedRels extends unknown[] ? FindDuplicates<ResolvedRels> extends infer Duplicates ? Duplicates extends never ? false : Duplicates extends {
fieldName: infer FieldName;
} ? FieldName extends string ? {
[K in FieldName]: SelectQueryError<`table "${RelationName}" specified more than once use hinting for desambiguation`>;
} : false : false : false : false : false : false : false;
/**
* Returns a boolean representing whether there is a foreign key referencing
* a given relation.
*/
declare type HasFKeyToFRel<FRelName, Relationships> = Relationships extends [infer R] ? R extends {
referencedRelation: FRelName;
} ? true : false : Relationships extends [infer R, ...infer Rest] ? HasFKeyToFRel<FRelName, [R]> extends true ? true : HasFKeyToFRel<FRelName, Rest> : false;
/**
* Checks if there is more than one relation to a given foreign relation name in the Relationships.
*/
declare type HasMultipleFKeysToFRelDeduplicated<FRelName, Relationships> = Relationships extends [
infer R,
...infer Rest
] ? R extends {
referencedRelation: FRelName;
} ? HasFKeyToFRel<FRelName, Rest> extends true ? true : HasMultipleFKeysToFRelDeduplicated<FRelName, Rest> : HasMultipleFKeysToFRelDeduplicated<FRelName, Rest> : false;
declare type HasMultipleFKeysToFRel<FRelName, Relationships extends unknown[]> = HasMultipleFKeysToFRelDeduplicated<FRelName, DeduplicateRelationships<Relationships>>;
declare type CheckRelationshipError<Schema extends GenericSchema, Relationships extends GenericRelationship[], CurrentTableOrView extends keyof TablesAndViews<Schema> & string, FoundRelation> = FoundRelation extends SelectQueryError<string> ? FoundRelation : FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName;
name: string;
};
direction: 'reverse';
} ? RelatedRelationName extends string ? HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`> : FoundRelation : never : FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName;
name: string;
};
direction: 'forward';
from: infer From;
} ? RelatedRelationName extends string ? From extends keyof TablesAndViews<Schema> & string ? HasMultipleFKeysToFRel<RelatedRelationName, TablesAndViews<Schema>[From]['Relationships']> extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${From}' and '${RelatedRelationName}' you need to hint the column with ${From}!<columnName> ?`> : FoundRelation : never : never : FoundRelation;
/**
* Resolves relationships for embedded resources and retrieves the referenced Table
*/
export declare type ResolveRelationship<Schema extends GenericSchema, Relationships extends GenericRelationship[], Field extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews<Schema> & string> = ResolveReverseRelationship<Schema, Relationships, Field, CurrentTableOrView> extends infer ReverseRelationship ? ReverseRelationship extends false ? CheckRelationshipError<Schema, Relationships, CurrentTableOrView, ResolveForwardRelationship<Schema, Field, CurrentTableOrView>> : CheckRelationshipError<Schema, Relationships, CurrentTableOrView, ReverseRelationship> : never;
/**
* Resolves reverse relationships (from children to parent)
*/
declare type ResolveReverseRelationship<Schema extends GenericSchema, Relationships extends GenericRelationship[], Field extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews<Schema> & string> = FindFieldMatchingRelationships<Schema, Relationships, Field> extends infer FoundRelation ? FoundRelation extends never ? false : FoundRelation extends {
referencedRelation: infer RelatedRelationName;
} ? RelatedRelationName extends string ? RelatedRelationName extends keyof TablesAndViews<Schema> ? FoundRelation extends {
hint: string;
} ? {
referencedTable: TablesAndViews<Schema>[RelatedRelationName];
relation: FoundRelation;
direction: 'reverse';
from: CurrentTableOrView;
} : HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`> : {
referencedTable: TablesAndViews<Schema>[RelatedRelationName];
relation: FoundRelation;
direction: 'reverse';
from: CurrentTableOrView;
} : SelectQueryError<`Relation '${RelatedRelationName}' not found in schema.`> : false : false : false;
export declare type FindMatchingTableRelationships<Schema extends GenericSchema, Relationships extends GenericRelationship[], value extends string> = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends {
referencedRelation: infer ReferencedRelation;
} ? ReferencedRelation extends keyof Schema['Tables'] ? R extends {
foreignKeyName: value;
} ? R & {
match: 'fkname';
} : R extends {
referencedRelation: value;
} ? R & {
match: 'refrel';
} : R extends {
columns: [value];
} ? R & {
match: 'col';
} : FindMatchingTableRelationships<Schema, Rest, value> : FindMatchingTableRelationships<Schema, Rest, value> : false : false : false;
export declare type FindMatchingViewRelationships<Schema extends GenericSchema, Relationships extends GenericRelationship[], value extends string> = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends {
referencedRelation: infer ReferencedRelation;
} ? ReferencedRelation extends keyof Schema['Views'] ? R extends {
foreignKeyName: value;
} ? R & {
match: 'fkname';
} : R extends {
referencedRelation: value;
} ? R & {
match: 'refrel';
} : R extends {
columns: [value];
} ? R & {
match: 'col';
} : FindMatchingViewRelationships<Schema, Rest, value> : FindMatchingViewRelationships<Schema, Rest, value> : false : false : false;
export declare type FindMatchingHintTableRelationships<Schema extends GenericSchema, Relationships extends GenericRelationship[], hint extends string, name extends string> = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends {
referencedRelation: infer ReferencedRelation;
} ? ReferencedRelation extends name ? R extends {
foreignKeyName: hint;
} ? R & {
match: 'fkname';
} : R extends {
referencedRelation: hint;
} ? R & {
match: 'refrel';
} : R extends {
columns: [hint];
} ? R & {
match: 'col';
} : FindMatchingHintTableRelationships<Schema, Rest, hint, name> : FindMatchingHintTableRelationships<Schema, Rest, hint, name> : false : false : false;
export declare type FindMatchingHintViewRelationships<Schema extends GenericSchema, Relationships extends GenericRelationship[], hint extends string, name extends string> = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends {
referencedRelation: infer ReferencedRelation;
} ? ReferencedRelation extends name ? R extends {
foreignKeyName: hint;
} ? R & {
match: 'fkname';
} : R extends {
referencedRelation: hint;
} ? R & {
match: 'refrel';
} : R extends {
columns: [hint];
} ? R & {
match: 'col';
} : FindMatchingHintViewRelationships<Schema, Rest, hint, name> : FindMatchingHintViewRelationships<Schema, Rest, hint, name> : false : false : false;
declare type IsColumnsNullable<Table extends Pick<GenericTable, 'Row'>, Columns extends (keyof Table['Row'])[]> = Columns extends [infer Column, ...infer Rest] ? Column extends keyof Table['Row'] ? ContainsNull<Table['Row'][Column]> extends true ? true : IsColumnsNullable<Table, Rest extends (keyof Table['Row'])[] ? Rest : []> : false : false;
export declare type IsRelationNullable<Table extends GenericTable, Relation extends GenericRelationship> = IsColumnsNullable<Table, Relation['columns']>;
declare type TableForwardRelationships<Schema extends GenericSchema, TName> = TName extends keyof TablesAndViews<Schema> ? UnionToArray<RecursivelyFindRelationships<Schema, TName, keyof TablesAndViews<Schema>>> extends infer R ? R extends (GenericRelationship & {
from: keyof TablesAndViews<Schema>;
})[] ? R : [] : [] : [];
declare type RecursivelyFindRelationships<Schema extends GenericSchema, TName, Keys extends keyof TablesAndViews<Schema>> = Keys extends infer K ? K extends keyof TablesAndViews<Schema> ? FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K> extends never ? RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>> : FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K> | RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>> : false : false;
declare type FilterRelationships<R, TName, From> = R extends readonly (infer Rel)[] ? Rel extends {
referencedRelation: TName;
} ? Rel & {
from: From;
} : never : never;
export declare type ResolveForwardRelationship<Schema extends GenericSchema, Field extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews<Schema> & string> = FindFieldMatchingRelationships<Schema, TablesAndViews<Schema>[Field['name']]['Relationships'], Ast.FieldNode & {
name: CurrentTableOrView;
hint: Field['hint'];
}> extends infer FoundByName ? FoundByName extends GenericRelationship ? {
referencedTable: TablesAndViews<Schema>[Field['name']];
relation: FoundByName;
direction: 'forward';
from: Field['name'];
type: 'found-by-name';
} : FindFieldMatchingRelationships<Schema, TableForwardRelationships<Schema, CurrentTableOrView>, Field> extends infer FoundByMatch ? FoundByMatch extends GenericRelationship & {
from: keyof TablesAndViews<Schema>;
} ? {
referencedTable: TablesAndViews<Schema>[FoundByMatch['from']];
relation: FoundByMatch;
direction: 'forward';
from: CurrentTableOrView;
type: 'found-by-match';
} : FindJoinTableRelationship<Schema, CurrentTableOrView, Field['name']> extends infer FoundByJoinTable ? FoundByJoinTable extends GenericRelationship ? {
referencedTable: TablesAndViews<Schema>[FoundByJoinTable['referencedRelation']];
relation: FoundByJoinTable & {
match: 'refrel';
};
direction: 'forward';
from: CurrentTableOrView;
type: 'found-by-join-table';
} : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>;
/**
* Given a CurrentTableOrView, finds all join tables to this relation.
 * For example, if products and categories are linked via a product_categories table:
*
* @example
* Given:
 * - CurrentTableOrView = 'products'
* - FieldName = "categories"
*
* It should return this relationship from product_categories:
* {
* foreignKeyName: "product_categories_category_id_fkey",
* columns: ["category_id"],
* isOneToOne: false,
* referencedRelation: "categories",
* referencedColumns: ["id"]
* }
*/
declare type ResolveJoinTableRelationship<Schema extends GenericSchema, CurrentTableOrView extends keyof TablesAndViews<Schema> & string, FieldName extends string> = {
[TableName in keyof TablesAndViews<Schema>]: DeduplicateRelationships<TablesAndViews<Schema>[TableName]['Relationships']> extends readonly (infer Rel)[] ? Rel extends {
referencedRelation: CurrentTableOrView;
} ? DeduplicateRelationships<TablesAndViews<Schema>[TableName]['Relationships']> extends readonly (infer OtherRel)[] ? OtherRel extends {
referencedRelation: FieldName;
} ? OtherRel : never : never : never : never;
}[keyof TablesAndViews<Schema>];
export declare type FindJoinTableRelationship<Schema extends GenericSchema, CurrentTableOrView extends keyof TablesAndViews<Schema> & string, FieldName extends string> = ResolveJoinTableRelationship<Schema, CurrentTableOrView, FieldName> extends infer Result ? [Result] extends [never] ? false : Result : never;
/**
* Finds a matching relationship based on the FieldNode's name and optional hint.
*/
export declare type FindFieldMatchingRelationships<Schema extends GenericSchema, Relationships extends GenericRelationship[], Field extends Ast.FieldNode> = Field extends {
hint: string;
} ? FindMatchingHintTableRelationships<Schema, Relationships, Field['hint'], Field['name']> extends GenericRelationship ? FindMatchingHintTableRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-table-via-hint';
hint: Field['hint'];
} : FindMatchingHintViewRelationships<Schema, Relationships, Field['hint'], Field['name']> extends GenericRelationship ? FindMatchingHintViewRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-view-via-hint';
hint: Field['hint'];
} : SelectQueryError<'Failed to find matching relation via hint'> : FindMatchingTableRelationships<Schema, Relationships, Field['name']> extends GenericRelationship ? FindMatchingTableRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-table-via-name';
name: Field['name'];
} : FindMatchingViewRelationships<Schema, Relationships, Field['name']> extends GenericRelationship ? FindMatchingViewRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-view-via-name';
name: Field['name'];
} : SelectQueryError<'Failed to find matching relation via name'>;
export declare type JsonPathToAccessor<Path extends string> = Path extends `${infer P1}->${infer P2}` ? P2 extends `>${infer Rest}` ? JsonPathToAccessor<`${P1}.${Rest}`> : P2 extends string ? JsonPathToAccessor<`${P1}.${P2}`> : Path : Path extends `>${infer Rest}` ? JsonPathToAccessor<Rest> : Path extends `${infer P1}::${infer _}` ? JsonPathToAccessor<P1> : Path extends `${infer P1}${')' | ','}${infer _}` ? P1 : Path;
export declare type JsonPathToType<T, Path extends string> = Path extends '' ? T : ContainsNull<T> extends true ? JsonPathToType<Exclude<T, null>, Path> : Path extends `${infer Key}.${infer Rest}` ? Key extends keyof T ? JsonPathToType<T[Key], Rest> : never : Path extends keyof T ? T[Path] : never;
export declare type IsStringUnion<T> = string extends T ? false : T extends string ? [T] extends [never] ? false : true : false;
export {};
//# sourceMappingURL=utils.d.ts.map
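
The relationship-resolution helpers above are what produce the "more than one relationship was found … you need to hint" errors, and disambiguation uses PostgREST's `!hint` embed syntax. A sketch under an assumed schema where a `messages` table has both `sender_id` and `receiver_id` foreign keys to `users` (again reusing the hypothetical client from the first sketch):

```ts
async function hintExample() {
  // Ambiguous: `messages` has two relationships to `users`, so the inferred result
  // becomes a SelectQueryError asking for a hint:
  // const bad = await postgrest.from('users').select('id, messages(*)')

  // Disambiguated with a hint (a column or foreign key name), resolved via
  // FindMatchingHintTableRelationships above:
  const { data } = await postgrest.from('users').select('id, sent:messages!sender_id(*)')
  return data
}
```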

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/select-query-parser/utils.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,98 @@
import PostgrestError from './PostgrestError';
import { ContainsNull } from './select-query-parser/types';
import { SelectQueryError } from './select-query-parser/utils';
export declare type Fetch = typeof fetch;
/**
* Response format
*
* {@link https://github.com/supabase/supabase-js/issues/32}
*/
interface PostgrestResponseBase {
status: number;
statusText: string;
}
export interface PostgrestResponseSuccess<T> extends PostgrestResponseBase {
error: null;
data: T;
count: number | null;
}
export interface PostgrestResponseFailure extends PostgrestResponseBase {
error: PostgrestError;
data: null;
count: null;
}
export declare type PostgrestSingleResponse<T> = PostgrestResponseSuccess<T> | PostgrestResponseFailure;
export declare type PostgrestMaybeSingleResponse<T> = PostgrestSingleResponse<T | null>;
export declare type PostgrestResponse<T> = PostgrestSingleResponse<T[]>;
export declare type GenericRelationship = {
foreignKeyName: string;
columns: string[];
isOneToOne?: boolean;
referencedRelation: string;
referencedColumns: string[];
};
export declare type GenericTable = {
Row: Record<string, unknown>;
Insert: Record<string, unknown>;
Update: Record<string, unknown>;
Relationships: GenericRelationship[];
};
export declare type GenericUpdatableView = {
Row: Record<string, unknown>;
Insert: Record<string, unknown>;
Update: Record<string, unknown>;
Relationships: GenericRelationship[];
};
export declare type GenericNonUpdatableView = {
Row: Record<string, unknown>;
Relationships: GenericRelationship[];
};
export declare type GenericView = GenericUpdatableView | GenericNonUpdatableView;
export declare type GenericFunction = {
Args: Record<string, unknown>;
Returns: unknown;
};
export declare type GenericSchema = {
Tables: Record<string, GenericTable>;
Views: Record<string, GenericView>;
Functions: Record<string, GenericFunction>;
};
export declare type Prettify<T> = {
[K in keyof T]: T[K];
} & {};
export declare type SimplifyDeep<Type, ExcludeType = never> = ConditionalSimplifyDeep<Type, ExcludeType | NonRecursiveType | Set<unknown> | Map<unknown, unknown>, object>;
declare type ConditionalSimplifyDeep<Type, ExcludeType = never, IncludeType = unknown> = Type extends ExcludeType ? Type : Type extends IncludeType ? {
[TypeKey in keyof Type]: ConditionalSimplifyDeep<Type[TypeKey], ExcludeType, IncludeType>;
} : Type;
declare type NonRecursiveType = BuiltIns | Function | (new (...arguments_: any[]) => unknown);
declare type BuiltIns = Primitive | void | Date | RegExp;
declare type Primitive = null | undefined | string | number | boolean | symbol | bigint;
export declare type IsValidResultOverride<Result, NewResult, ErrorResult, ErrorNewResult> = Result extends any[] ? NewResult extends any[] ? true : ErrorResult : NewResult extends any[] ? ErrorNewResult : true;
/**
* Utility type to check if array types match between Result and NewResult.
* Returns either the valid NewResult type or an error message type.
*/
export declare type CheckMatchingArrayTypes<Result, NewResult> = Result extends SelectQueryError<string> ? NewResult : IsValidResultOverride<Result, NewResult, {
Error: 'Type mismatch: Cannot cast array result to a single object. Use .overrideTypes<Array<YourType>> or .returns<Array<YourType>> (deprecated) for array results or .single() to convert the result to a single object';
}, {
Error: 'Type mismatch: Cannot cast single object to array type. Remove Array wrapper from return type or make sure you are not using .single() up in the calling chain';
}> extends infer ValidationResult ? ValidationResult extends true ? ContainsNull<Result> extends true ? NewResult | null : NewResult : ValidationResult : never;
declare type Simplify<T> = T extends object ? {
[K in keyof T]: T[K];
} : T;
declare type ExplicitKeys<T> = {
[K in keyof T]: string extends K ? never : K;
}[keyof T];
declare type MergeExplicit<New, Row> = {
[K in ExplicitKeys<New> | ExplicitKeys<Row>]: K extends keyof New ? K extends keyof Row ? Row[K] extends SelectQueryError<string> ? New[K] : New[K] extends any[] ? Row[K] extends any[] ? Array<Simplify<MergeDeep<NonNullable<New[K][number]>, NonNullable<Row[K][number]>>>> : New[K] : IsPlainObject<NonNullable<New[K]>> extends true ? IsPlainObject<NonNullable<Row[K]>> extends true ? ContainsNull<New[K]> extends true ? // If the override wants to preserve optionality
Simplify<MergeDeep<NonNullable<New[K]>, NonNullable<Row[K]>>> | null : Simplify<MergeDeep<New[K], NonNullable<Row[K]>>> : New[K] : New[K] : New[K] : K extends keyof Row ? Row[K] : never;
};
declare type MergeDeep<New, Row> = Simplify<MergeExplicit<New, Row> & (string extends keyof Row ? {
[K: string]: Row[string];
} : {})>;
declare type IsPlainObject<T> = T extends any[] ? false : T extends object ? true : false;
export declare type MergePartialResult<NewResult, Result, Options> = Options extends {
merge: true;
} ? Result extends any[] ? NewResult extends any[] ? Array<Simplify<MergeDeep<NewResult[number], Result[number]>>> : never : Simplify<MergeDeep<NewResult, Result>> : NewResult;
export {};
//# sourceMappingURL=types.d.ts.map
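
`PostgrestSingleResponse` above is a discriminated union on `error`, so a simple truthiness check narrows the response. A sketch against a hypothetical `users` table, reusing the client from the first example:

```ts
async function responseExample() {
  const res = await postgrest.from('users').select('id, name').single()
  if (res.error) {
    // PostgrestResponseFailure: `data` is null and `error` is a PostgrestError
    console.error(res.error.message)
    return null
  }
  // PostgrestResponseSuccess: `data` is the single row; `count` stays null unless requested
  return res.data
}
```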

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,2 @@
export declare const version = "0.0.0-automated";
//# sourceMappingURL=version.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"version.d.ts","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,OAAO,oBAAoB,CAAA"}

View File

@@ -0,0 +1,5 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.version = void 0;
exports.version = '0.0.0-automated';
//# sourceMappingURL=version.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":";;;AAAa,QAAA,OAAO,GAAG,iBAAiB,CAAA"}

View File

@@ -0,0 +1,28 @@
import index from '../cjs/index.js'
const {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
} = index
export {
PostgrestBuilder,
PostgrestClient,
PostgrestFilterBuilder,
PostgrestQueryBuilder,
PostgrestTransformBuilder,
PostgrestError,
}
// compatibility with CJS output
export default {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}

67
node_modules/@supabase/postgrest-js/package.json generated vendored Normal file
View File

@@ -0,0 +1,67 @@
{
"name": "@supabase/postgrest-js",
"version": "1.19.4",
"description": "Isomorphic PostgREST client",
"keywords": [
"postgrest",
"supabase"
],
"homepage": "https://github.com/supabase/postgrest-js",
"bugs": "https://github.com/supabase/postgrest-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/cjs/index.js",
"module": "dist/esm/wrapper.mjs",
"exports": {
"import": {
"types": "./dist/cjs/index.d.ts",
"default": "./dist/esm/wrapper.mjs"
},
"require": {
"types": "./dist/cjs/index.d.ts",
"default": "./dist/cjs/index.js"
}
},
"types": "./dist/cjs/index.d.ts",
"repository": "supabase/postgrest-js",
"scripts": {
"clean": "rimraf dist docs/v2",
"format": "prettier --write \"{src,test}/**/*.ts\" wrapper.mjs",
"format:check": "prettier --check \"{src,test}/**/*.ts\"",
"build": "run-s clean format build:*",
"build:cjs": "tsc -p tsconfig.json",
"build:esm": "cpy wrapper.mjs dist/esm/",
"docs": "typedoc src/index.ts --out docs/v2",
"docs:json": "typedoc --json docs/v2/spec.json --excludeExternals src/index.ts",
"test": "run-s format:check test:types db:clean db:run test:run db:clean && node test/smoke.cjs && node test/smoke.mjs",
"test:run": "jest --runInBand --coverage",
"test:update": "run-s db:clean db:run && jest --runInBand --updateSnapshot && run-s db:clean",
"test:types": "run-s build && tsd --files 'test/**/*.test-d.ts'",
"db:clean": "cd test/db && docker compose down --volumes",
"db:run": "cd test/db && docker compose up --detach && wait-for-localhost 3000"
},
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
},
"devDependencies": {
"@types/jest": "^27.5.1",
"cpy-cli": "^5.0.0",
"jest": "^28.1.0",
"node-abort-controller": "^3.0.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.6.2",
"rimraf": "^3.0.2",
"semantic-release-plugin-update-version-in-files": "^1.1.0",
"ts-expect": "^1.3.0",
"ts-jest": "^28.0.3",
"tsd": "^0.31.2",
"type-fest": "^4.32.0",
"typedoc": "^0.22.16",
"typescript": "^4.5.5",
"wait-for-localhost-cli": "^3.0.0"
}
}
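
The `exports` map above routes ESM `import` to the `dist/esm/wrapper.mjs` shim and CommonJS `require` to `dist/cjs/index.js`, with both conditions sharing the CJS type declarations. A minimal sketch of the two consumer styles, assuming Node's conditional-exports resolution:

```ts
// ESM / bundler consumers (resolved to dist/esm/wrapper.mjs):
import { PostgrestClient } from '@supabase/postgrest-js'

// CommonJS consumers (resolved to dist/cjs/index.js):
// const { PostgrestClient } = require('@supabase/postgrest-js')
```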

View File

@@ -0,0 +1,279 @@
// @ts-ignore
import nodeFetch from '@supabase/node-fetch'
import type {
Fetch,
PostgrestSingleResponse,
PostgrestResponseSuccess,
CheckMatchingArrayTypes,
MergePartialResult,
IsValidResultOverride,
} from './types'
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
export default abstract class PostgrestBuilder<Result, ThrowOnError extends boolean = false>
implements
PromiseLike<
ThrowOnError extends true ? PostgrestResponseSuccess<Result> : PostgrestSingleResponse<Result>
>
{
protected method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE'
protected url: URL
protected headers: Record<string, string>
protected schema?: string
protected body?: unknown
protected shouldThrowOnError = false
protected signal?: AbortSignal
protected fetch: Fetch
protected isMaybeSingle: boolean
constructor(builder: PostgrestBuilder<Result>) {
this.method = builder.method
this.url = builder.url
this.headers = builder.headers
this.schema = builder.schema
this.body = builder.body
this.shouldThrowOnError = builder.shouldThrowOnError
this.signal = builder.signal
this.isMaybeSingle = builder.isMaybeSingle
if (builder.fetch) {
this.fetch = builder.fetch
} else if (typeof fetch === 'undefined') {
this.fetch = nodeFetch
} else {
this.fetch = fetch
}
}
/**
* If there's an error with the query, throwOnError will reject the promise by
* throwing the error instead of returning it as part of a successful response.
*
* {@link https://github.com/supabase/supabase-js/issues/92}
*/
throwOnError(): this & PostgrestBuilder<Result, true> {
this.shouldThrowOnError = true
return this as this & PostgrestBuilder<Result, true>
}
/**
* Set an HTTP header for the request.
*/
setHeader(name: string, value: string): this {
this.headers = { ...this.headers }
this.headers[name] = value
return this
}
then<
TResult1 = ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>,
TResult2 = never
>(
onfulfilled?:
| ((
value: ThrowOnError extends true
? PostgrestResponseSuccess<Result>
: PostgrestSingleResponse<Result>
) => TResult1 | PromiseLike<TResult1>)
| undefined
| null,
onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null
): PromiseLike<TResult1 | TResult2> {
// https://postgrest.org/en/stable/api.html#switching-schemas
if (this.schema === undefined) {
// skip
} else if (['GET', 'HEAD'].includes(this.method)) {
this.headers['Accept-Profile'] = this.schema
} else {
this.headers['Content-Profile'] = this.schema
}
if (this.method !== 'GET' && this.method !== 'HEAD') {
this.headers['Content-Type'] = 'application/json'
}
// NOTE: Invoke w/o `this` to avoid illegal invocation error.
// https://github.com/supabase/postgrest-js/pull/247
const _fetch = this.fetch
let res = _fetch(this.url.toString(), {
method: this.method,
headers: this.headers,
body: JSON.stringify(this.body),
signal: this.signal,
}).then(async (res) => {
let error = null
let data = null
let count: number | null = null
let status = res.status
let statusText = res.statusText
if (res.ok) {
if (this.method !== 'HEAD') {
const body = await res.text()
if (body === '') {
// Prefer: return=minimal
} else if (this.headers['Accept'] === 'text/csv') {
data = body
} else if (
this.headers['Accept'] &&
this.headers['Accept'].includes('application/vnd.pgrst.plan+text')
) {
data = body
} else {
data = JSON.parse(body)
}
}
const countHeader = this.headers['Prefer']?.match(/count=(exact|planned|estimated)/)
const contentRange = res.headers.get('content-range')?.split('/')
if (countHeader && contentRange && contentRange.length > 1) {
count = parseInt(contentRange[1])
}
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.isMaybeSingle && this.method === 'GET' && Array.isArray(data)) {
if (data.length > 1) {
error = {
// https://github.com/PostgREST/postgrest/blob/a867d79c42419af16c18c3fb019eba8df992626f/src/PostgREST/Error.hs#L553
code: 'PGRST116',
details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
hint: null,
message: 'JSON object requested, multiple (or no) rows returned',
}
data = null
count = null
status = 406
statusText = 'Not Acceptable'
} else if (data.length === 1) {
data = data[0]
} else {
data = null
}
}
} else {
const body = await res.text()
try {
error = JSON.parse(body)
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (Array.isArray(error) && res.status === 404) {
data = []
error = null
status = 200
statusText = 'OK'
}
} catch {
// Workaround for https://github.com/supabase/postgrest-js/issues/295
if (res.status === 404 && body === '') {
status = 204
statusText = 'No Content'
} else {
error = {
message: body,
}
}
}
if (error && this.isMaybeSingle && error?.details?.includes('0 rows')) {
error = null
status = 200
statusText = 'OK'
}
if (error && this.shouldThrowOnError) {
throw new PostgrestError(error)
}
}
const postgrestResponse = {
error,
data,
count,
status,
statusText,
}
return postgrestResponse
})
if (!this.shouldThrowOnError) {
res = res.catch((fetchError) => ({
error: {
message: `${fetchError?.name ?? 'FetchError'}: ${fetchError?.message}`,
details: `${fetchError?.stack ?? ''}`,
hint: '',
code: `${fetchError?.code ?? ''}`,
},
data: null,
count: null,
status: 0,
statusText: '',
}))
}
return res.then(onfulfilled, onrejected)
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestBuilder<CheckMatchingArrayTypes<Result, NewResult>, ThrowOnError> {
/* istanbul ignore next */
return this as unknown as PostgrestBuilder<
CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
/**
* Override the type of the returned `data` field in the response.
*
* @typeParam NewResult - The new type to cast the response data to
* @typeParam Options - Optional type configuration (defaults to { merge: true })
* @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
* @example
* ```typescript
* // Merge with existing types (default behavior)
* const query = supabase
* .from('users')
* .select()
* .overrideTypes<{ custom_field: string }>()
*
* // Replace existing types completely
* const replaceQuery = supabase
* .from('users')
* .select()
* .overrideTypes<{ id: number; name: string }, { merge: false }>()
* ```
* @returns A PostgrestBuilder instance with the new type
*/
overrideTypes<
NewResult,
Options extends { merge?: boolean } = { merge: true }
>(): PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
    ? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
> {
return this as unknown as PostgrestBuilder<
IsValidResultOverride<Result, NewResult, false, false> extends true
      ? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? MergePartialResult<NewResult, NonNullable<Result>, Options> | null
: MergePartialResult<NewResult, Result, Options>
: CheckMatchingArrayTypes<Result, NewResult>,
ThrowOnError
>
}
}
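
A sketch of how `throwOnError()` changes the control flow implemented in `then()` above, reusing the hypothetical `postgrest` client and table name from the earlier examples:

```ts
import { PostgrestError } from '@supabase/postgrest-js'

async function throwOnErrorExample() {
  try {
    // On a PostgREST-level failure the promise rejects with a PostgrestError
    // instead of resolving with { error }.
    const { data } = await postgrest.from('users').select('id').throwOnError()
    return data
  } catch (err) {
    if (err instanceof PostgrestError) {
      console.error(err.code, err.message, err.hint)
    }
    throw err // network-level fetch errors are re-thrown as-is
  }
}
```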

View File

@@ -0,0 +1,181 @@
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import { DEFAULT_HEADERS } from './constants'
import { Fetch, GenericSchema } from './types'
/**
* PostgREST client.
*
* @typeParam Database - Types for the schema from the [type
* generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
*
* @typeParam SchemaName - Postgres schema to switch to. Must be a string
* literal, the same one passed to the constructor. If the schema is not
* `"public"`, this must be supplied manually.
*/
export default class PostgrestClient<
Database = any,
SchemaName extends string & keyof Database = 'public' extends keyof Database
? 'public'
: string & keyof Database,
Schema extends GenericSchema = Database[SchemaName] extends GenericSchema
? Database[SchemaName]
: any
> {
url: string
headers: Record<string, string>
schemaName?: SchemaName
fetch?: Fetch
// TODO: Add back shouldThrowOnError once we figure out the typings
/**
* Creates a PostgREST client.
*
* @param url - URL of the PostgREST endpoint
* @param options - Named parameters
* @param options.headers - Custom headers
* @param options.schema - Postgres schema to switch to
* @param options.fetch - Custom fetch
*/
constructor(
url: string,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: SchemaName
fetch?: Fetch
} = {}
) {
this.url = url
this.headers = { ...DEFAULT_HEADERS, ...headers }
this.schemaName = schema
this.fetch = fetch
}
from<
TableName extends string & keyof Schema['Tables'],
Table extends Schema['Tables'][TableName]
>(relation: TableName): PostgrestQueryBuilder<Schema, Table, TableName>
from<ViewName extends string & keyof Schema['Views'], View extends Schema['Views'][ViewName]>(
relation: ViewName
): PostgrestQueryBuilder<Schema, View, ViewName>
/**
* Perform a query on a table or a view.
*
* @param relation - The table or view name to query
*/
from(relation: string): PostgrestQueryBuilder<Schema, any, any> {
const url = new URL(`${this.url}/${relation}`)
return new PostgrestQueryBuilder(url, {
headers: { ...this.headers },
schema: this.schemaName,
fetch: this.fetch,
})
}
/**
   * Select a schema to query or perform a function (rpc) call.
*
* The schema needs to be on the list of exposed schemas inside Supabase.
*
* @param schema - The schema to query
*/
schema<DynamicSchema extends string & keyof Database>(
schema: DynamicSchema
): PostgrestClient<
Database,
DynamicSchema,
Database[DynamicSchema] extends GenericSchema ? Database[DynamicSchema] : any
> {
return new PostgrestClient(this.url, {
headers: this.headers,
schema,
fetch: this.fetch,
})
}
/**
* Perform a function call.
*
* @param fn - The function name to call
* @param args - The arguments to pass to the function call
* @param options - Named parameters
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
* @param options.get - When set to `true`, the function will be called with
* read-only access mode.
* @param options.count - Count algorithm to use to count rows returned by the
* function. Only applicable for [set-returning
* functions](https://www.postgresql.org/docs/current/functions-srf.html).
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
rpc<FnName extends string & keyof Schema['Functions'], Fn extends Schema['Functions'][FnName]>(
fn: FnName,
args: Fn['Args'] = {},
{
head = false,
get = false,
count,
}: {
head?: boolean
get?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<
Schema,
Fn['Returns'] extends any[]
? Fn['Returns'][number] extends Record<string, unknown>
? Fn['Returns'][number]
: never
: never,
Fn['Returns'],
FnName,
null
> {
let method: 'HEAD' | 'GET' | 'POST'
const url = new URL(`${this.url}/rpc/${fn}`)
let body: unknown | undefined
if (head || get) {
method = head ? 'HEAD' : 'GET'
Object.entries(args)
        // params with undefined values need to be filtered out, otherwise they'll
// show up as `?param=undefined`
.filter(([_, value]) => value !== undefined)
// array values need special syntax
.map(([name, value]) => [name, Array.isArray(value) ? `{${value.join(',')}}` : `${value}`])
.forEach(([name, value]) => {
url.searchParams.append(name, value)
})
} else {
method = 'POST'
body = args
}
const headers = { ...this.headers }
if (count) {
headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url,
headers,
schema: this.schemaName,
body,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<Fn['Returns']>)
}
}
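
A sketch tying the `schema()` and `rpc()` paths above together. The schema and function names are assumptions, and `postgrest` is the hypothetical typed client from the first example:

```ts
async function rpcExample() {
  // HEAD request: no rows are returned, only the count derived from the Content-Range header.
  const { count } = await postgrest
    .schema('public')
    .rpc('search_users', { query: 'jane' }, { head: true, count: 'exact' })

  // GET request: read-only access mode, with arguments serialized as query parameters
  // (arrays become `{a,b}`, as in the code above).
  const { data } = await postgrest.rpc('search_users', { query: 'jane' }, { get: true })
  return { count, data }
}
```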

View File

@@ -0,0 +1,18 @@
/**
* Error format
*
* {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
*/
export default class PostgrestError extends Error {
details: string
hint: string
code: string
constructor(context: { message: string; details: string; hint: string; code: string }) {
super(context.message)
this.name = 'PostgrestError'
this.details = context.details
this.hint = context.hint
this.code = context.code
}
}

View File

@@ -0,0 +1,592 @@
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import { JsonPathToAccessor, JsonPathToType } from './select-query-parser/utils'
import { GenericSchema } from './types'
type FilterOperator =
| 'eq'
| 'neq'
| 'gt'
| 'gte'
| 'lt'
| 'lte'
| 'like'
| 'ilike'
| 'is'
| 'in'
| 'cs'
| 'cd'
| 'sl'
| 'sr'
| 'nxl'
| 'nxr'
| 'adj'
| 'ov'
| 'fts'
| 'plfts'
| 'phfts'
| 'wfts'
export type IsStringOperator<Path extends string> = Path extends `${string}->>${string}`
? true
: false
// Match relationship filters with `table.column` syntax and resolve underlying
// column value. If not matched, fall back to the generic type.
// TODO: Validate the relationship itself ala select-query-parser. Currently we
// assume that all tables have valid relationships to each other, despite
// nonexistent foreign keys.
type ResolveFilterValue<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
ColumnName extends string
> = ColumnName extends `${infer RelationshipTable}.${infer Remainder}`
? Remainder extends `${infer _}.${infer _}`
? ResolveFilterValue<Schema, Row, Remainder>
: ResolveFilterRelationshipValue<Schema, RelationshipTable, Remainder>
: ColumnName extends keyof Row
? Row[ColumnName]
: // If the column selection is a jsonpath like `data->value` or `data->>value` we attempt to match
// the expected type with the parsed custom json type
IsStringOperator<ColumnName> extends true
? string
: JsonPathToType<Row, JsonPathToAccessor<ColumnName>> extends infer JsonPathValue
? JsonPathValue extends never
? never
: JsonPathValue
: never
type ResolveFilterRelationshipValue<
Schema extends GenericSchema,
RelationshipTable extends string,
RelationshipColumn extends string
> = Schema['Tables'] & Schema['Views'] extends infer TablesAndViews
? RelationshipTable extends keyof TablesAndViews
? 'Row' extends keyof TablesAndViews[RelationshipTable]
? RelationshipColumn extends keyof TablesAndViews[RelationshipTable]['Row']
? TablesAndViews[RelationshipTable]['Row'][RelationshipColumn]
: unknown
: unknown
: unknown
: never
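// Example (sketch, hypothetical schema): ResolveFilterValue lets filters target an embedded
// table's column with dotted `table.column` syntax, resolving the value type through
// ResolveFilterRelationshipValue:
//
//   postgrest.from('posts').select('*, users(*)').eq('users.name', 'Jane')
//
// Here 'users.name' resolves to Schema['Tables']['users']['Row']['name'], a plain 'title'
// resolves to Row['title'], and a JSON path selection like 'metadata->>label' resolves to string.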
export default class PostgrestFilterBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestTransformBuilder<Schema, Row, Result, RelationName, Relationships> {
/**
* Match only rows where `column` is equal to `value`.
*
* To check if the value of `column` is NULL, you should use `.is()` instead.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
eq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? NonNullable<unknown>
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? NonNullable<ResolvedFilterValue>
: // We should never enter this case as all the branches are covered above
never
): this {
this.url.searchParams.append(column, `eq.${value}`)
return this
}
/**
* Match only rows where `column` is not equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
neq<ColumnName extends string>(
column: ColumnName,
value: ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: never
): this {
this.url.searchParams.append(column, `neq.${value}`)
return this
}
gt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gt(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gt(column: string, value: unknown): this {
this.url.searchParams.append(column, `gt.${value}`)
return this
}
gte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
gte(column: string, value: unknown): this
/**
* Match only rows where `column` is greater than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
gte(column: string, value: unknown): this {
this.url.searchParams.append(column, `gte.${value}`)
return this
}
lt<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lt(column: string, value: unknown): this
/**
* Match only rows where `column` is less than `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lt(column: string, value: unknown): this {
this.url.searchParams.append(column, `lt.${value}`)
return this
}
lte<ColumnName extends string & keyof Row>(column: ColumnName, value: Row[ColumnName]): this
lte(column: string, value: unknown): this
/**
* Match only rows where `column` is less than or equal to `value`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
lte(column: string, value: unknown): this {
this.url.searchParams.append(column, `lte.${value}`)
return this
}
like<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
like(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-sensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
like(column: string, pattern: string): this {
this.url.searchParams.append(column, `like.${pattern}`)
return this
}
likeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(all).{${patterns.join(',')}}`)
return this
}
likeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
likeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-sensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
likeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `like(any).{${patterns.join(',')}}`)
return this
}
ilike<ColumnName extends string & keyof Row>(column: ColumnName, pattern: string): this
ilike(column: string, pattern: string): this
/**
* Match only rows where `column` matches `pattern` case-insensitively.
*
* @param column - The column to filter on
* @param pattern - The pattern to match with
*/
ilike(column: string, pattern: string): this {
this.url.searchParams.append(column, `ilike.${pattern}`)
return this
}
ilikeAllOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAllOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches all of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAllOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(all).{${patterns.join(',')}}`)
return this
}
ilikeAnyOf<ColumnName extends string & keyof Row>(
column: ColumnName,
patterns: readonly string[]
): this
ilikeAnyOf(column: string, patterns: readonly string[]): this
/**
* Match only rows where `column` matches any of `patterns` case-insensitively.
*
* @param column - The column to filter on
* @param patterns - The patterns to match with
*/
ilikeAnyOf(column: string, patterns: readonly string[]): this {
this.url.searchParams.append(column, `ilike(any).{${patterns.join(',')}}`)
return this
}
is<ColumnName extends string & keyof Row>(
column: ColumnName,
value: Row[ColumnName] & (boolean | null)
): this
is(column: string, value: boolean | null): this
/**
* Match only rows where `column` IS `value`.
*
* For non-boolean columns, this is only relevant for checking if the value of
* `column` is NULL by setting `value` to `null`.
*
* For boolean columns, you can also set `value` to `true` or `false` and it
* will behave the same way as `.eq()`.
*
* @param column - The column to filter on
* @param value - The value to filter with
*/
is(column: string, value: boolean | null): this {
this.url.searchParams.append(column, `is.${value}`)
return this
}
/**
* Match only rows where `column` is included in the `values` array.
*
* @param column - The column to filter on
* @param values - The values array to filter with
*/
in<ColumnName extends string>(
column: ColumnName,
values: ReadonlyArray<
ResolveFilterValue<Schema, Row, ColumnName> extends never
? unknown
: // We want to infer the type before wrapping it into a `NonNullable` to avoid too deep
// type resolution error
ResolveFilterValue<Schema, Row, ColumnName> extends infer ResolvedFilterValue
? ResolvedFilterValue
: // We should never enter this case as all the branches are covered above
never
>
): this {
const cleanedValues = Array.from(new Set(values))
.map((s) => {
// handle postgrest reserved characters
// https://postgrest.org/en/v7.0.0/api.html#reserved-characters
if (typeof s === 'string' && new RegExp('[,()]').test(s)) return `"${s}"`
else return `${s}`
})
.join(',')
this.url.searchParams.append(column, `in.(${cleanedValues})`)
return this
}
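  /*
   * Illustrative sketch, not part of the library: roughly how `.in()` serializes
   * its values, assuming a hypothetical `cities` table. Values containing the
   * PostgREST reserved characters `,`, `(` or `)` are double-quoted, and
   * duplicates are dropped via `new Set(values)`.
   *
   *   postgrest.from('cities').select('name').in('name', ['Paris', 'San Francisco, CA'])
   *   // appends (before URL encoding): name=in.(Paris,"San Francisco, CA")
   */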
contains<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* `column` contains every element appearing in `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
contains(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range types can be inclusive '[', ']' or exclusive '(', ')' so just
// keep it simple and accept a string
this.url.searchParams.append(column, `cs.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cs.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cs.${JSON.stringify(value)}`)
}
return this
}
containedBy<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]> | Record<string, unknown>
): this
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this
/**
* Only relevant for jsonb, array, and range columns. Match only rows where
* every element appearing in `column` is contained by `value`.
*
* @param column - The jsonb, array, or range column to filter on
* @param value - The jsonb, array, or range value to filter with
*/
containedBy(column: string, value: string | readonly unknown[] | Record<string, unknown>): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `cd.${value}`)
} else if (Array.isArray(value)) {
// array
this.url.searchParams.append(column, `cd.{${value.join(',')}}`)
} else {
// json
this.url.searchParams.append(column, `cd.${JSON.stringify(value)}`)
}
return this
}
rangeGt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is greater than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGt(column: string, range: string): this {
this.url.searchParams.append(column, `sr.${range}`)
return this
}
rangeGte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeGte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or greater than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeGte(column: string, range: string): this {
this.url.searchParams.append(column, `nxl.${range}`)
return this
}
rangeLt<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLt(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is less than any element in `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLt(column: string, range: string): this {
this.url.searchParams.append(column, `sl.${range}`)
return this
}
rangeLte<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeLte(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where every element in
* `column` is either contained in `range` or less than any element in
* `range`.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeLte(column: string, range: string): this {
this.url.searchParams.append(column, `nxr.${range}`)
return this
}
rangeAdjacent<ColumnName extends string & keyof Row>(column: ColumnName, range: string): this
rangeAdjacent(column: string, range: string): this
/**
* Only relevant for range columns. Match only rows where `column` is
* mutually exclusive to `range` and there can be no element between the two
* ranges.
*
* @param column - The range column to filter on
* @param range - The range to filter with
*/
rangeAdjacent(column: string, range: string): this {
this.url.searchParams.append(column, `adj.${range}`)
return this
}
overlaps<ColumnName extends string & keyof Row>(
column: ColumnName,
value: string | ReadonlyArray<Row[ColumnName]>
): this
overlaps(column: string, value: string | readonly unknown[]): this
/**
* Only relevant for array and range columns. Match only rows where
* `column` and `value` have an element in common.
*
* @param column - The array or range column to filter on
* @param value - The array or range value to filter with
*/
overlaps(column: string, value: string | readonly unknown[]): this {
if (typeof value === 'string') {
// range
this.url.searchParams.append(column, `ov.${value}`)
} else {
// array
this.url.searchParams.append(column, `ov.{${value.join(',')}}`)
}
return this
}
textSearch<ColumnName extends string & keyof Row>(
column: ColumnName,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
textSearch(
column: string,
query: string,
options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch' }
): this
/**
* Only relevant for text and tsvector columns. Match only rows where
* `column` matches the query string in `query`.
*
* @param column - The text or tsvector column to filter on
* @param query - The query text to match with
* @param options - Named parameters
* @param options.config - The text search configuration to use
* @param options.type - Change how the `query` text is interpreted
*/
textSearch(
column: string,
query: string,
{ config, type }: { config?: string; type?: 'plain' | 'phrase' | 'websearch' } = {}
): this {
let typePart = ''
if (type === 'plain') {
typePart = 'pl'
} else if (type === 'phrase') {
typePart = 'ph'
} else if (type === 'websearch') {
typePart = 'w'
}
const configPart = config === undefined ? '' : `(${config})`
this.url.searchParams.append(column, `${typePart}fts${configPart}.${query}`)
return this
}
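  /*
   * Illustrative sketch, not part of the library: the query parameter built by
   * `.textSearch()`, assuming a hypothetical `quotes` table with a tsvector
   * column named `content`. Values shown before URL encoding.
   *
   *   postgrest.from('quotes').select().textSearch('content', `'fat' & 'cat'`, { config: 'english' })
   *   // appends: content=fts(english).'fat' & 'cat'
   *
   *   postgrest.from('quotes').select().textSearch('content', 'fat cats', { type: 'websearch' })
   *   // appends: content=wfts.fat cats
   */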
match<ColumnName extends string & keyof Row>(query: Record<ColumnName, Row[ColumnName]>): this
match(query: Record<string, unknown>): this
/**
* Match only rows where each column in `query` keys is equal to its
* associated value. Shorthand for multiple `.eq()`s.
*
* @param query - The object to filter with, with column names as keys mapped
* to their filter values
*/
match(query: Record<string, unknown>): this {
Object.entries(query).forEach(([column, value]) => {
this.url.searchParams.append(column, `eq.${value}`)
})
return this
}
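  /*
   * Illustrative sketch, not part of the library: `.match()` expands to one
   * `eq` filter per key, assuming a hypothetical `countries` table.
   *
   *   postgrest.from('countries').select().match({ id: 2, name: 'Algeria' })
   *   // appends: id=eq.2 and name=eq.Algeria
   */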
not<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: FilterOperator,
value: Row[ColumnName]
): this
not(column: string, operator: string, value: unknown): this
/**
   * Match only rows which don't satisfy the filter.
   *
   * Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to be negated to filter with, following
* PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
not(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `not.${operator}.${value}`)
return this
}
/**
* Match only rows which satisfy at least one of the filters.
*
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure it's properly sanitized.
*
* It's currently not possible to do an `.or()` filter across multiple tables.
*
* @param filters - The filters to use, following PostgREST syntax
* @param options - Named parameters
* @param options.referencedTable - Set this to filter on referenced tables
* instead of the parent table
* @param options.foreignTable - Deprecated, use `referencedTable` instead
*/
or(
filters: string,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = referencedTable ? `${referencedTable}.or` : 'or'
this.url.searchParams.append(key, `(${filters})`)
return this
}
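  /*
   * Illustrative sketch, not part of the library: the raw filter string is
   * wrapped in parentheses, and `referencedTable` prefixes the key, assuming
   * hypothetical `countries` and `cities` tables.
   *
   *   postgrest.from('countries').select().or('id.eq.2,name.eq.Algeria')
   *   // appends: or=(id.eq.2,name.eq.Algeria)
   *
   *   postgrest.from('countries').select('name, cities(name)').or('name.eq.Lyon', { referencedTable: 'cities' })
   *   // appends: cities.or=(name.eq.Lyon)
   */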
filter<ColumnName extends string & keyof Row>(
column: ColumnName,
operator: `${'' | 'not.'}${FilterOperator}`,
value: unknown
): this
filter(column: string, operator: string, value: unknown): this
/**
* Match only rows which satisfy the filter. This is an escape hatch - you
* should use the specific filter methods wherever possible.
*
   * Unlike most filters, `operator` and `value` are used as-is and need to
* follow [PostgREST
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
* to make sure they are properly sanitized.
*
* @param column - The column to filter on
* @param operator - The operator to filter with, following PostgREST syntax
* @param value - The value to filter with, following PostgREST syntax
*/
filter(column: string, operator: string, value: unknown): this {
this.url.searchParams.append(column, `${operator}.${value}`)
return this
}
}

View File

@@ -0,0 +1,381 @@
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import { GetResult } from './select-query-parser/result'
import { Fetch, GenericSchema, GenericTable, GenericView } from './types'
export default class PostgrestQueryBuilder<
Schema extends GenericSchema,
Relation extends GenericTable | GenericView,
RelationName = unknown,
Relationships = Relation extends { Relationships: infer R } ? R : unknown
> {
url: URL
headers: Record<string, string>
schema?: string
signal?: AbortSignal
fetch?: Fetch
constructor(
url: URL,
{
headers = {},
schema,
fetch,
}: {
headers?: Record<string, string>
schema?: string
fetch?: Fetch
}
) {
this.url = url
this.headers = headers
this.schema = schema
this.fetch = fetch
}
/**
* Perform a SELECT query on the table or view.
*
* @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName`
*
* @param options - Named parameters
*
* @param options.head - When set to `true`, `data` will not be returned.
* Useful if you only need the count.
*
* @param options.count - Count algorithm to use to count rows in the table or view.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
select<
Query extends string = '*',
ResultOne = GetResult<Schema, Relation['Row'], RelationName, Relationships, Query>
>(
columns?: Query,
{
head = false,
count,
}: {
head?: boolean
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], ResultOne[], RelationName, Relationships> {
const method = head ? 'HEAD' : 'GET'
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
if (count) {
this.headers['Prefer'] = `count=${count}`
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<ResultOne[]>)
}
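  /*
   * Illustrative sketch, not part of the library: whitespace outside double
   * quotes is stripped from the column list, and `count` is sent as a `Prefer`
   * header, assuming a hypothetical `users` table.
   *
   *   postgrest.from('users').select('id, name, "full name"', { count: 'exact' })
   *   // sets (before URL encoding): select=id,name,"full name"
   *   // and the header: Prefer: count=exact
   */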
// TODO(v3): Make `defaultToNull` consistent for both single & bulk inserts.
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an INSERT into the table or view.
*
   * By default, inserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to insert. Pass an object to insert a single row
* or an array to insert multiple rows.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count inserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. Only applies for bulk
* inserts.
*/
insert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
count,
defaultToNull = true,
}: {
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
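  /*
   * Illustrative sketch, not part of the library: for a bulk insert the union
   * of row keys is sent as the `columns` query parameter, and the options end
   * up in the `Prefer` header, assuming a hypothetical `users` table.
   *
   *   postgrest.from('users').insert([{ id: 1, name: 'a' }, { id: 2 }], { count: 'exact', defaultToNull: false })
   *   // sets: columns="id","name"
   *   // and the header: Prefer: count=exact,missing=default
   */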
// TODO(v3): Make `defaultToNull` consistent for both single & bulk upserts.
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row,
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row[],
options?: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships>
/**
* Perform an UPSERT on the table or view. Depending on the column(s) passed
* to `onConflict`, `.upsert()` allows you to perform the equivalent of
* `.insert()` if a row with the corresponding `onConflict` columns doesn't
* exist, or if it does exist, perform an alternative action depending on
* `ignoreDuplicates`.
*
   * By default, upserted rows are not returned. To return them, chain the call
* with `.select()`.
*
* @param values - The values to upsert with. Pass an object to upsert a
* single row or an array to upsert multiple rows.
*
* @param options - Named parameters
*
* @param options.onConflict - Comma-separated UNIQUE column(s) to specify how
* duplicate rows are determined. Two rows are duplicates if all the
* `onConflict` columns are equal.
*
* @param options.ignoreDuplicates - If `true`, duplicate rows are ignored. If
* `false`, duplicate rows are merged with existing rows.
*
* @param options.count - Count algorithm to use to count upserted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*
* @param options.defaultToNull - Make missing fields default to `null`.
* Otherwise, use the default value for the column. This only applies when
* inserting new rows, not when merging with existing rows under
* `ignoreDuplicates: false`. This also only applies when doing bulk upserts.
*/
upsert<Row extends Relation extends { Insert: unknown } ? Relation['Insert'] : never>(
values: Row | Row[],
{
onConflict,
ignoreDuplicates = false,
count,
defaultToNull = true,
}: {
onConflict?: string
ignoreDuplicates?: boolean
count?: 'exact' | 'planned' | 'estimated'
defaultToNull?: boolean
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'POST'
const prefersHeaders = [`resolution=${ignoreDuplicates ? 'ignore' : 'merge'}-duplicates`]
if (onConflict !== undefined) this.url.searchParams.set('on_conflict', onConflict)
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (!defaultToNull) {
prefersHeaders.push('missing=default')
}
this.headers['Prefer'] = prefersHeaders.join(',')
if (Array.isArray(values)) {
const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), [] as string[])
if (columns.length > 0) {
const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`)
this.url.searchParams.set('columns', uniqueColumns.join(','))
}
}
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
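  /*
   * Illustrative sketch, not part of the library: `onConflict` becomes the
   * `on_conflict` query parameter and `ignoreDuplicates` picks the resolution
   * strategy, assuming a hypothetical `users` table.
   *
   *   postgrest.from('users').upsert({ id: 1, name: 'a' }, { onConflict: 'id' })
   *   // sets: on_conflict=id
   *   // and the header: Prefer: resolution=merge-duplicates
   *
   *   // With ignoreDuplicates: true the header becomes Prefer: resolution=ignore-duplicates
   */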
/**
* Perform an UPDATE on the table or view.
*
   * By default, updated rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param values - The values to update with
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count updated rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
update<Row extends Relation extends { Update: unknown } ? Relation['Update'] : never>(
values: Row,
{
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}
): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'PATCH'
const prefersHeaders = []
if (this.headers['Prefer']) {
prefersHeaders.push(this.headers['Prefer'])
}
if (count) {
prefersHeaders.push(`count=${count}`)
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
body: values,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
/**
* Perform a DELETE on the table or view.
*
   * By default, deleted rows are not returned. To return them, chain the call
* with `.select()` after filters.
*
* @param options - Named parameters
*
* @param options.count - Count algorithm to use to count deleted rows.
*
* `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
* hood.
*
* `"planned"`: Approximated but fast count algorithm. Uses the Postgres
* statistics under the hood.
*
* `"estimated"`: Uses exact count for low numbers and planned count for high
* numbers.
*/
delete({
count,
}: {
count?: 'exact' | 'planned' | 'estimated'
} = {}): PostgrestFilterBuilder<Schema, Relation['Row'], null, RelationName, Relationships> {
const method = 'DELETE'
const prefersHeaders = []
if (count) {
prefersHeaders.push(`count=${count}`)
}
if (this.headers['Prefer']) {
prefersHeaders.unshift(this.headers['Prefer'])
}
this.headers['Prefer'] = prefersHeaders.join(',')
return new PostgrestFilterBuilder({
method,
url: this.url,
headers: this.headers,
schema: this.schema,
fetch: this.fetch,
allowEmpty: false,
} as unknown as PostgrestBuilder<null>)
}
}

View File

@@ -0,0 +1,327 @@
import PostgrestBuilder from './PostgrestBuilder'
import { GetResult } from './select-query-parser/result'
import { GenericSchema, CheckMatchingArrayTypes } from './types'
export default class PostgrestTransformBuilder<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
Result,
RelationName = unknown,
Relationships = unknown
> extends PostgrestBuilder<Result> {
/**
* Perform a SELECT on the query result.
*
* By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
* return modified rows. By calling this method, modified rows are returned in
* `data`.
*
* @param columns - The columns to retrieve, separated by commas
*/
select<
Query extends string = '*',
NewResultOne = GetResult<Schema, Row, RelationName, Relationships, Query>
>(
columns?: Query
): PostgrestTransformBuilder<Schema, Row, NewResultOne[], RelationName, Relationships> {
// Remove whitespaces except when quoted
let quoted = false
const cleanedColumns = (columns ?? '*')
.split('')
.map((c) => {
if (/\s/.test(c) && !quoted) {
return ''
}
if (c === '"') {
quoted = !quoted
}
return c
})
.join('')
this.url.searchParams.set('select', cleanedColumns)
    // Append to any existing Prefer header; avoid an `undefined` prefix when none is set
    this.headers['Prefer'] = this.headers['Prefer']
      ? `${this.headers['Prefer']},return=representation`
      : 'return=representation'
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
NewResultOne[],
RelationName,
Relationships
>
}
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: undefined }
): this
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: string }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order<ColumnName extends string & keyof Row>(
column: ColumnName,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: undefined }
): this
/**
* @deprecated Use `options.referencedTable` instead of `options.foreignTable`
*/
order(
column: string,
options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: string }
): this
/**
* Order the query result by `column`.
*
* You can call this method multiple times to order by multiple columns.
*
* You can order referenced tables, but it only affects the ordering of the
* parent table if you use `!inner` in the query.
*
* @param column - The column to order by
* @param options - Named parameters
* @param options.ascending - If `true`, the result will be in ascending order
* @param options.nullsFirst - If `true`, `null`s appear first. If `false`,
* `null`s appear last.
* @param options.referencedTable - Set this to order a referenced table by
* its columns
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
order(
column: string,
{
ascending = true,
nullsFirst,
foreignTable,
referencedTable = foreignTable,
}: {
ascending?: boolean
nullsFirst?: boolean
foreignTable?: string
referencedTable?: string
} = {}
): this {
const key = referencedTable ? `${referencedTable}.order` : 'order'
const existingOrder = this.url.searchParams.get(key)
this.url.searchParams.set(
key,
`${existingOrder ? `${existingOrder},` : ''}${column}.${ascending ? 'asc' : 'desc'}${
nullsFirst === undefined ? '' : nullsFirst ? '.nullsfirst' : '.nullslast'
}`
)
return this
}
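  /*
   * Illustrative sketch, not part of the library: repeated calls append to the
   * same `order` parameter, assuming a hypothetical `users` table.
   *
   *   postgrest.from('users').select().order('id', { ascending: false, nullsFirst: true }).order('name')
   *   // sets: order=id.desc.nullsfirst,name.asc
   *
   *   // With { referencedTable: 'cities' } the key becomes cities.order instead.
   */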
/**
* Limit the query result by `count`.
*
* @param count - The maximum number of rows to return
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
limit(
count: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const key = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(key, `${count}`)
return this
}
/**
* Limit the query result by starting at an offset `from` and ending at the offset `to`.
* Only records within this range are returned.
   * This respects the query order; if there is no order clause, the range could behave unexpectedly.
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
* and fourth rows of the query.
*
* @param from - The starting index from which to limit the result
* @param to - The last index to which to limit the result
* @param options - Named parameters
* @param options.referencedTable - Set this to limit rows of referenced
* tables instead of the parent table
* @param options.foreignTable - Deprecated, use `options.referencedTable`
* instead
*/
range(
from: number,
to: number,
{
foreignTable,
referencedTable = foreignTable,
}: { foreignTable?: string; referencedTable?: string } = {}
): this {
const keyOffset =
typeof referencedTable === 'undefined' ? 'offset' : `${referencedTable}.offset`
const keyLimit = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`
this.url.searchParams.set(keyOffset, `${from}`)
// Range is inclusive, so add 1
this.url.searchParams.set(keyLimit, `${to - from + 1}`)
return this
}
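  /*
   * Illustrative sketch, not part of the library: the inclusive `from`/`to`
   * pair is translated into `offset` and `limit` parameters, assuming a
   * hypothetical `users` table.
   *
   *   postgrest.from('users').select().range(1, 3)
   *   // sets: offset=1 and limit=3   (rows 2 through 4 of the ordered result)
   */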
/**
* Set the AbortSignal for the fetch request.
*
* @param signal - The AbortSignal to use for the fetch request
*/
abortSignal(signal: AbortSignal): this {
this.signal = signal
return this
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be one row (e.g. using `.limit(1)`), otherwise this
* returns an error.
*/
single<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne> {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
return this as unknown as PostgrestBuilder<ResultOne>
}
/**
* Return `data` as a single object instead of an array of objects.
*
* Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
* this returns an error.
*/
maybeSingle<
ResultOne = Result extends (infer ResultOne)[] ? ResultOne : never
>(): PostgrestBuilder<ResultOne | null> {
// Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
// Issue persists e.g. for `.insert([...]).select().maybeSingle()`
if (this.method === 'GET') {
this.headers['Accept'] = 'application/json'
} else {
this.headers['Accept'] = 'application/vnd.pgrst.object+json'
}
this.isMaybeSingle = true
return this as unknown as PostgrestBuilder<ResultOne | null>
}
/**
* Return `data` as a string in CSV format.
*/
csv(): PostgrestBuilder<string> {
this.headers['Accept'] = 'text/csv'
return this as unknown as PostgrestBuilder<string>
}
/**
* Return `data` as an object in [GeoJSON](https://geojson.org) format.
*/
geojson(): PostgrestBuilder<Record<string, unknown>> {
this.headers['Accept'] = 'application/geo+json'
return this as unknown as PostgrestBuilder<Record<string, unknown>>
}
/**
* Return `data` as the EXPLAIN plan for the query.
*
* You need to enable the
* [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
* setting before using this method.
*
* @param options - Named parameters
*
* @param options.analyze - If `true`, the query will be executed and the
* actual run time will be returned
*
* @param options.verbose - If `true`, the query identifier will be returned
* and `data` will include the output columns of the query
*
* @param options.settings - If `true`, include information on configuration
* parameters that affect query planning
*
* @param options.buffers - If `true`, include information on buffer usage
*
* @param options.wal - If `true`, include information on WAL record generation
*
* @param options.format - The format of the output, can be `"text"` (default)
* or `"json"`
*/
explain({
analyze = false,
verbose = false,
settings = false,
buffers = false,
wal = false,
format = 'text',
}: {
analyze?: boolean
verbose?: boolean
settings?: boolean
buffers?: boolean
wal?: boolean
format?: 'json' | 'text'
} = {}): PostgrestBuilder<Record<string, unknown>[]> | PostgrestBuilder<string> {
const options = [
analyze ? 'analyze' : null,
verbose ? 'verbose' : null,
settings ? 'settings' : null,
buffers ? 'buffers' : null,
wal ? 'wal' : null,
]
.filter(Boolean)
.join('|')
// An Accept header can carry multiple media types but postgrest-js always sends one
const forMediatype = this.headers['Accept'] ?? 'application/json'
this.headers[
'Accept'
] = `application/vnd.pgrst.plan+${format}; for="${forMediatype}"; options=${options};`
if (format === 'json') return this as unknown as PostgrestBuilder<Record<string, unknown>[]>
else return this as unknown as PostgrestBuilder<string>
}
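  /*
   * Illustrative sketch, not part of the library: the chosen options are folded
   * into a single `Accept` header, assuming the previous Accept was
   * `application/json`.
   *
   *   postgrest.from('users').select().explain({ analyze: true, format: 'json' })
   *   // sets: Accept: application/vnd.pgrst.plan+json; for="application/json"; options=analyze;
   */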
/**
* Rollback the query.
*
* `data` will still be returned, but the query is not committed.
*/
rollback(): this {
if ((this.headers['Prefer'] ?? '').trim().length > 0) {
this.headers['Prefer'] += ',tx=rollback'
} else {
this.headers['Prefer'] = 'tx=rollback'
}
return this
}
/**
* Override the type of the returned `data`.
*
* @typeParam NewResult - The new result type to override with
* @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
*/
returns<NewResult>(): PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
> {
return this as unknown as PostgrestTransformBuilder<
Schema,
Row,
CheckMatchingArrayTypes<Result, NewResult>,
RelationName,
Relationships
>
}
}

2
node_modules/@supabase/postgrest-js/src/constants.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
import { version } from './version'
export const DEFAULT_HEADERS = { 'X-Client-Info': `postgrest-js/${version}` }

34
node_modules/@supabase/postgrest-js/src/index.ts generated vendored Normal file
View File

@@ -0,0 +1,34 @@
// Always update wrapper.mjs when updating this file.
import PostgrestClient from './PostgrestClient'
import PostgrestQueryBuilder from './PostgrestQueryBuilder'
import PostgrestFilterBuilder from './PostgrestFilterBuilder'
import PostgrestTransformBuilder from './PostgrestTransformBuilder'
import PostgrestBuilder from './PostgrestBuilder'
import PostgrestError from './PostgrestError'
export {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export default {
PostgrestClient,
PostgrestQueryBuilder,
PostgrestFilterBuilder,
PostgrestTransformBuilder,
PostgrestBuilder,
PostgrestError,
}
export type {
PostgrestResponse,
PostgrestResponseFailure,
PostgrestResponseSuccess,
PostgrestSingleResponse,
PostgrestMaybeSingleResponse,
} from './types'
// https://github.com/supabase/postgrest-js/issues/551
// To be replaced with a helper type that only uses public types
export type { GetResult as UnstableGetResult } from './select-query-parser/result'
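// Illustrative sketch, not part of the library: the named and default exports
// expose the same classes, so either import style below works.
//
//   import PostgrestPkg, { PostgrestClient } from '@supabase/postgrest-js'
//   // PostgrestPkg.PostgrestClient === PostgrestClient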

View File

@@ -0,0 +1,469 @@
// Credits to @bnjmnt4n (https://www.npmjs.com/package/postgrest-query)
// See https://github.com/PostgREST/postgrest/blob/2f91853cb1de18944a4556df09e52450b881cfb3/src/PostgREST/ApiRequest/QueryParams.hs#L282-L284
import { SimplifyDeep } from '../types'
import { JsonPathToAccessor } from './utils'
/**
* Parses a query.
* A query is a sequence of nodes, separated by `,`, ensuring that there is
* no remaining input after all nodes have been parsed.
*
* Returns an array of parsed nodes, or an error.
*/
export type ParseQuery<Query extends string> = string extends Query
? GenericStringError
: ParseNodes<EatWhitespace<Query>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends ''
? SimplifyDeep<Nodes>
: ParserError<`Unexpected input: ${Remainder}`>
: ParserError<'Invalid nodes array structure'>
: ParseNodes<EatWhitespace<Query>>
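/*
 * Illustrative sketch, not part of the library: a type-level example of what
 * `ParseQuery` resolves to for a small query (shape shown roughly, ignoring
 * `SimplifyDeep`).
 *
 *   type Example = ParseQuery<'id, author:users(name)'>
 *   // roughly:
 *   // [
 *   //   { type: 'field'; name: 'id' },
 *   //   { type: 'field'; name: 'users'; alias: 'author'; children: [{ type: 'field'; name: 'name' }] }
 *   // ]
 */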
/**
* Notes: all `Parse*` types assume that their input strings have their whitespace
* removed. They return tuples of ["Return Value", "Remainder of text"] or
* a `ParserError`.
*/
/**
* Parses a sequence of nodes, separated by `,`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"] or an error.
*/
type ParseNodes<Input extends string> = string extends Input
? GenericStringError
: ParseNodesHelper<Input, []>
type ParseNodesHelper<Input extends string, Nodes extends Ast.Node[]> = ParseNode<Input> extends [
infer Node,
`${infer Remainder}`
]
? Node extends Ast.Node
? EatWhitespace<Remainder> extends `,${infer Remainder}`
? ParseNodesHelper<EatWhitespace<Remainder>, [...Nodes, Node]>
: [[...Nodes, Node], EatWhitespace<Remainder>]
: ParserError<'Invalid node type in nodes helper'>
: ParseNode<Input>
/**
* Parses a node.
* A node is one of the following:
* - `*`
* - a field, as defined above
* - a renamed field, `renamed_field:field`
* - a spread field, `...field`
*/
type ParseNode<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: // `*`
Input extends `*${infer Remainder}`
? [Ast.StarNode, EatWhitespace<Remainder>]
: // `...field`
Input extends `...${infer Remainder}`
? ParseField<EatWhitespace<Remainder>> extends [infer TargetField, `${infer Remainder}`]
? TargetField extends Ast.FieldNode
? [{ type: 'spread'; target: TargetField }, EatWhitespace<Remainder>]
: ParserError<'Invalid target field type in spread'>
: ParserError<`Unable to parse spread resource at \`${Input}\``>
: ParseIdentifier<Input> extends [infer NameOrAlias, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `::${infer _}`
? // It's a type cast and not an alias, so treat it as part of the field.
ParseField<Input>
: EatWhitespace<Remainder> extends `:${infer Remainder}`
? // `alias:`
ParseField<EatWhitespace<Remainder>> extends [infer Field, `${infer Remainder}`]
? Field extends Ast.FieldNode
? [Omit<Field, 'alias'> & { alias: NameOrAlias }, EatWhitespace<Remainder>]
: ParserError<'Invalid field type in alias parsing'>
: ParserError<`Unable to parse renamed field at \`${Input}\``>
: // Otherwise, just parse it as a field without alias.
ParseField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
/**
* Parses a field without preceding alias.
* A field is one of the following:
* - a top-level `count` field: https://docs.postgrest.org/en/v12/references/api/aggregate_functions.html#the-case-of-count
* - a field with an embedded resource
* - `field(nodes)`
* - `field!hint(nodes)`
* - `field!inner(nodes)`
* - `field!left(nodes)`
* - `field!hint!inner(nodes)`
* - `field!hint!left(nodes)`
* - a field without an embedded resource (see {@link ParseNonEmbeddedResourceField})
*/
type ParseField<Input extends string> = Input extends ''
? ParserError<'Empty string'>
: ParseIdentifier<Input> extends [infer Name, `${infer Remainder}`]
? Name extends 'count'
? ParseCountField<Input>
: Remainder extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!inner(nodes)`
[{ type: 'field'; name: Name; innerJoin: true; children: Children }, Remainder]
: ParserError<'Invalid children array in inner join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!inner" at \`${Remainder}\``
>
: EatWhitespace<Remainder> extends `!left${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field!left(nodes)`
// !left is a noise word - treat it the same way as a non-`!inner`.
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in left join'>
: CreateParserErrorIfRequired<
ParseEmbeddedResource<EatWhitespace<Remainder>>,
`Expected embedded resource after "!left" at \`${EatWhitespace<Remainder>}\``
>
: EatWhitespace<Remainder> extends `!${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [infer Hint, `${infer Remainder}`]
? EatWhitespace<Remainder> extends `!inner${infer Remainder}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint!inner(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; innerJoin: true; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint inner join'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParseEmbeddedResource<EatWhitespace<Remainder>> extends [
infer Children,
`${infer Remainder}`
]
? Children extends Ast.Node[]
? // `field!hint(nodes)`
[
{ type: 'field'; name: Name; hint: Hint; children: Children },
EatWhitespace<Remainder>
]
: ParserError<'Invalid children array in hint'>
: ParseEmbeddedResource<EatWhitespace<Remainder>>
: ParserError<`Expected identifier after "!" at \`${EatWhitespace<Remainder>}\``>
: EatWhitespace<Remainder> extends `(${infer _}`
? ParseEmbeddedResource<EatWhitespace<Remainder>> extends [infer Children, `${infer Remainder}`]
? Children extends Ast.Node[]
? // `field(nodes)`
[{ type: 'field'; name: Name; children: Children }, EatWhitespace<Remainder>]
: ParserError<'Invalid children array in field'>
: // Return error if start of embedded resource was detected but not found.
ParseEmbeddedResource<EatWhitespace<Remainder>>
: // Otherwise it's a non-embedded resource field.
ParseNonEmbeddedResourceField<Input>
: ParserError<`Expected identifier at \`${Input}\``>
type ParseCountField<Input extends string> = ParseIdentifier<Input> extends [
'count',
`${infer Remainder}`
]
? (
EatWhitespace<Remainder> extends `()${infer Remainder_}`
? EatWhitespace<Remainder_>
: EatWhitespace<Remainder>
) extends `${infer Remainder}`
? Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
{ type: 'field'; name: 'count'; aggregateFunction: 'count'; castType: CastType },
Remainder
]
: ParseFieldTypeCast<Remainder>
: [{ type: 'field'; name: 'count'; aggregateFunction: 'count' }, Remainder]
: never
: ParserError<`Expected "count" at \`${Input}\``>
/**
* Parses an embedded resource, which is an opening `(`, followed by a sequence of
* 0 or more nodes separated by `,`, then a closing `)`.
*
* Returns a tuple of ["Parsed fields", "Remainder of text"], an error,
* or the original string input indicating that no opening `(` was found.
*/
type ParseEmbeddedResource<Input extends string> = Input extends `(${infer Remainder}`
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [[], EatWhitespace<Remainder>]
: ParseNodes<EatWhitespace<Remainder>> extends [infer Nodes, `${infer Remainder}`]
? Nodes extends Ast.Node[]
? EatWhitespace<Remainder> extends `)${infer Remainder}`
? [Nodes, EatWhitespace<Remainder>]
: ParserError<`Expected ")" at \`${EatWhitespace<Remainder>}\``>
: ParserError<'Invalid nodes array in embedded resource'>
: ParseNodes<EatWhitespace<Remainder>>
: ParserError<`Expected "(" at \`${Input}\``>
/**
* Parses a field excluding embedded resources, without preceding field renaming.
* This is one of the following:
* - `field`
* - `field.aggregate()`
* - `field.aggregate()::type`
* - `field::type`
* - `field::type.aggregate()`
* - `field::type.aggregate()::type`
* - `field->json...`
* - `field->json.aggregate()`
* - `field->json.aggregate()::type`
* - `field->json::type`
* - `field->json::type.aggregate()`
* - `field->json::type.aggregate()::type`
*/
type ParseNonEmbeddedResourceField<Input extends string> = ParseIdentifier<Input> extends [
infer Name,
`${infer Remainder}`
]
? // Parse optional JSON path.
(
Remainder extends `->${infer PathAndRest}`
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [
{
type: 'field'
name: Name
alias: PropertyName
castType: PropertyType
jsonPath: JsonPathToAccessor<
PathAndRest extends `${infer Path},${string}` ? Path : PathAndRest
>
},
Remainder
]
: ParseJsonAccessor<Remainder>
: [{ type: 'field'; name: Name }, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional typecast or aggregate function input typecast.
(
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [Omit<Field, 'castType'> & { castType: CastType }, Remainder]
: ParseFieldTypeCast<Remainder>
: [Field, Remainder]
) extends infer Parsed
? Parsed extends [infer Field, `${infer Remainder}`]
? // Parse optional aggregate function.
Remainder extends `.${infer _}`
? ParseFieldAggregation<Remainder> extends [
infer AggregateFunction,
`${infer Remainder}`
]
? // Parse optional aggregate function output typecast.
Remainder extends `::${infer _}`
? ParseFieldTypeCast<Remainder> extends [infer CastType, `${infer Remainder}`]
? [
Omit<Field, 'castType'> & {
aggregateFunction: AggregateFunction
castType: CastType
},
Remainder
]
: ParseFieldTypeCast<Remainder>
: [Field & { aggregateFunction: AggregateFunction }, Remainder]
: ParseFieldAggregation<Remainder>
: [Field, Remainder]
: Parsed
: never
: Parsed
: never
: ParserError<`Expected identifier at \`${Input}\``>
/**
* Parses a JSON property accessor of the shape `->a->b->c`. The last accessor in
* the series may convert to text by using the ->> operator instead of ->.
*
* Returns a tuple of ["Last property name", "Last property type", "Remainder of text"]
*/
type ParseJsonAccessor<Input extends string> = Input extends `->${infer Remainder}`
? Remainder extends `>${infer Remainder}`
? ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? [Name, 'text', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->>`'>
: ParseIdentifier<Remainder> extends [infer Name, `${infer Remainder}`]
? ParseJsonAccessor<Remainder> extends [
infer PropertyName,
infer PropertyType,
`${infer Remainder}`
]
? [PropertyName, PropertyType, EatWhitespace<Remainder>]
: [Name, 'json', EatWhitespace<Remainder>]
: ParserError<'Expected property name after `->`'>
: ParserError<'Expected ->'>
/**
* Parses a field typecast (`::type`), returning a tuple of ["Type", "Remainder of text"].
*/
type ParseFieldTypeCast<Input extends string> = EatWhitespace<Input> extends `::${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [`${infer CastType}`, `${infer Remainder}`]
? [CastType, EatWhitespace<Remainder>]
: ParserError<`Invalid type for \`::\` operator at \`${Remainder}\``>
: ParserError<'Expected ::'>
/**
* Parses a field aggregation (`.max()`), returning a tuple of ["Aggregate function", "Remainder of text"]
*/
type ParseFieldAggregation<Input extends string> =
EatWhitespace<Input> extends `.${infer Remainder}`
? ParseIdentifier<EatWhitespace<Remainder>> extends [
`${infer FunctionName}`,
`${infer Remainder}`
]
? // Ensure that aggregation function is valid.
FunctionName extends Token.AggregateFunction
? EatWhitespace<Remainder> extends `()${infer Remainder}`
? [FunctionName, EatWhitespace<Remainder>]
: ParserError<`Expected \`()\` after \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator \`${FunctionName}\``>
: ParserError<`Invalid type for \`.\` operator at \`${Remainder}\``>
: ParserError<'Expected .'>
/**
* Parses a (possibly double-quoted) identifier.
* Identifiers are sequences of 1 or more letters.
*/
type ParseIdentifier<Input extends string> = ParseLetters<Input> extends [
infer Name,
`${infer Remainder}`
]
? [Name, EatWhitespace<Remainder>]
: ParseQuotedLetters<Input> extends [infer Name, `${infer Remainder}`]
? [Name, EatWhitespace<Remainder>]
: ParserError<`No (possibly double-quoted) identifier at \`${Input}\``>
/**
 * Parse a consecutive sequence of 1 or more letters, where letters are `[0-9a-zA-Z_]`.
*/
type ParseLetters<Input extends string> = string extends Input
? GenericStringError
: ParseLettersHelper<Input, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected letter at \`${Input}\``>
: [Letters, Remainder]
: ParseLettersHelper<Input, ''>
type ParseLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends Token.Letter
? ParseLettersHelper<Remainder, `${Acc}${L}`>
: [Acc, Input]
: [Acc, '']
/**
* Parse a consecutive sequence of 1 or more double-quoted letters,
* where letters are `[^"]`.
*/
type ParseQuotedLetters<Input extends string> = string extends Input
? GenericStringError
: Input extends `"${infer Remainder}`
? ParseQuotedLettersHelper<Remainder, ''> extends [`${infer Letters}`, `${infer Remainder}`]
? Letters extends ''
? ParserError<`Expected string at \`${Remainder}\``>
: [Letters, Remainder]
: ParseQuotedLettersHelper<Remainder, ''>
: ParserError<`Not a double-quoted string at \`${Input}\``>
type ParseQuotedLettersHelper<Input extends string, Acc extends string> = string extends Input
? GenericStringError
: Input extends `${infer L}${infer Remainder}`
? L extends '"'
? [Acc, Remainder]
: ParseQuotedLettersHelper<Remainder, `${Acc}${L}`>
: ParserError<`Missing closing double-quote in \`"${Acc}${Input}\``>
/**
* Trims whitespace from the left of the input.
*/
type EatWhitespace<Input extends string> = string extends Input
? GenericStringError
: Input extends `${Token.Whitespace}${infer Remainder}`
? EatWhitespace<Remainder>
: Input
/**
* Creates a new {@link ParserError} if the given input is not already a parser error.
*/
type CreateParserErrorIfRequired<Input, Message extends string> = Input extends ParserError<string>
? Input
: ParserError<Message>
/**
* Parser errors.
*/
export type ParserError<Message extends string> = { error: true } & Message
type GenericStringError = ParserError<'Received a generic string'>
export namespace Ast {
export type Node = FieldNode | StarNode | SpreadNode
export type FieldNode = {
type: 'field'
name: string
alias?: string
hint?: string
innerJoin?: true
castType?: string
jsonPath?: string
aggregateFunction?: Token.AggregateFunction
children?: Node[]
}
export type StarNode = {
type: 'star'
}
export type SpreadNode = {
type: 'spread'
target: FieldNode & { children: Node[] }
}
}
namespace Token {
export type Whitespace = ' ' | '\n' | '\t'
type LowerAlphabet =
| 'a'
| 'b'
| 'c'
| 'd'
| 'e'
| 'f'
| 'g'
| 'h'
| 'i'
| 'j'
| 'k'
| 'l'
| 'm'
| 'n'
| 'o'
| 'p'
| 'q'
| 'r'
| 's'
| 't'
| 'u'
| 'v'
| 'w'
| 'x'
| 'y'
| 'z'
type Alphabet = LowerAlphabet | Uppercase<LowerAlphabet>
type Digit = '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0'
export type Letter = Alphabet | Digit | '_'
export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max'
}

View File

@@ -0,0 +1,422 @@
import { GenericTable } from '../types'
import { ContainsNull, GenericRelationship, PostgreSQLTypes } from './types'
import { Ast, ParseQuery } from './parser'
import {
AggregateFunctions,
ExtractFirstProperty,
GenericSchema,
IsNonEmptyArray,
Prettify,
TablesAndViews,
TypeScriptTypes,
} from './types'
import {
CheckDuplicateEmbededReference,
GetFieldNodeResultName,
IsAny,
IsRelationNullable,
IsStringUnion,
JsonPathToType,
ResolveRelationship,
SelectQueryError,
} from './utils'
/**
* Main entry point for constructing the result type of a PostgREST query.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Query - The select query string literal to parse.
*/
export type GetResult<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName,
Relationships,
Query extends string
> = IsAny<Schema> extends true
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? ProcessNodesWithoutSchema<ParsedQuery>
: any
: ParsedQuery
: any
: Relationships extends null // For .rpc calls the passed relationships will be null in that case, the result will always be the function return type
? ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RPCCallNodes<ParsedQuery, RelationName extends string ? RelationName : 'rpc_call', Row>
: ParsedQuery
: Row
: ParseQuery<Query> extends infer ParsedQuery
? ParsedQuery extends Ast.Node[]
? RelationName extends string
? Relationships extends GenericRelationship[]
? ProcessNodes<Schema, Row, RelationName, Relationships, ParsedQuery>
: SelectQueryError<'Invalid Relationships cannot infer result type'>
: SelectQueryError<'Invalid RelationName cannot infer result type'>
: ParsedQuery
: never
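/*
 * Illustrative sketch, not part of the library: for a hypothetical schema with
 * a `users` table whose Row is { id: number; name: string }, `GetResult` would
 * resolve roughly as follows (Schema, UsersRow and UsersRelationships are
 * placeholder names).
 *
 *   type UsersResult = GetResult<Schema, UsersRow, 'users', UsersRelationships, 'id, name'>
 *   // roughly: { id: number; name: string }
 */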
type ProcessSimpleFieldWithoutSchema<Field extends Ast.FieldNode> =
Field['aggregateFunction'] extends AggregateFunctions
? {
        // An aggregate function will always override the column name: id.sum() will become sum,
        // unless it has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
        [K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes // We apply the detected cast as the result type
? TypeScriptTypes<Field['castType']>
: any
}
type ProcessFieldNodeWithoutSchema<Node extends Ast.FieldNode> = IsNonEmptyArray<
Node['children']
> extends true
? {
[K in GetFieldNodeResultName<Node>]: Node['children'] extends Ast.Node[]
? ProcessNodesWithoutSchema<Node['children']>[]
: ProcessSimpleFieldWithoutSchema<Node>
}
: ProcessSimpleFieldWithoutSchema<Node>
/**
* Processes a single Node without schema and returns the resulting TypeScript type.
*/
type ProcessNodeWithoutSchema<Node extends Ast.Node> = Node extends Ast.StarNode
? any
: Node extends Ast.SpreadNode
? Node['target']['children'] extends Ast.StarNode[]
? any
: Node['target']['children'] extends Ast.FieldNode[]
? {
[P in Node['target']['children'][number] as GetFieldNodeResultName<P>]: P['castType'] extends PostgreSQLTypes
? TypeScriptTypes<P['castType']>
: any
}
: any
: Node extends Ast.FieldNode
? ProcessFieldNodeWithoutSchema<Node>
: any
/**
* Processes nodes when Schema is any, providing basic type inference
*/
type ProcessNodesWithoutSchema<
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {}
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNodeWithoutSchema<FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodesWithoutSchema<RestNodes, Acc & FieldResult>
: FieldResult
: any
: any
: any
: Prettify<Acc>
/**
* Processes a single Node from a select chained after a rpc call
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current rpc function
* @param NodeType - The Node to process.
*/
export type ProcessRPCNode<
Row extends Record<string, unknown>,
RelationName extends string,
NodeType extends Ast.Node
> = NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessSimpleField<Row, RelationName, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'RPC Unsupported node type.'>
/**
* Process select call that can be chained after an rpc call
*/
export type RPCCallNodes<
Nodes extends Ast.Node[],
RelationName extends string,
Row extends Record<string, unknown>,
Acc extends Record<string, unknown> = {} // Acc is now an object
> = Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessRPCNode<Row, RelationName, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? RPCCallNodes<RestNodes, RelationName, Row, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array in RPC call'>
: SelectQueryError<'Invalid first node in RPC call'>
: Prettify<Acc>
/**
* Recursively processes an array of Nodes and accumulates the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Nodes - An array of AST nodes to process.
* @param Acc - Accumulator for the constructed type.
*/
export type ProcessNodes<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[],
Acc extends Record<string, unknown> = {} // Acc is now an object
> = CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes> extends false
? Nodes extends [infer FirstNode, ...infer RestNodes]
? FirstNode extends Ast.Node
? RestNodes extends Ast.Node[]
? ProcessNode<Schema, Row, RelationName, Relationships, FirstNode> extends infer FieldResult
? FieldResult extends Record<string, unknown>
? ProcessNodes<Schema, Row, RelationName, Relationships, RestNodes, Acc & FieldResult>
: FieldResult extends SelectQueryError<infer E>
? SelectQueryError<E>
: SelectQueryError<'Could not retrieve a valid record or error value'>
: SelectQueryError<'Processing node failed.'>
: SelectQueryError<'Invalid rest nodes array type in ProcessNodes'>
: SelectQueryError<'Invalid first node type in ProcessNodes'>
: Prettify<Acc>
: Prettify<CheckDuplicateEmbededReference<Schema, RelationName, Relationships, Nodes>>
/**
* Processes a single Node and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param NodeType - The Node to process.
*/
export type ProcessNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
NodeType extends Ast.Node
> =
// TODO: figure out why comparing the `type` property is necessary vs. `NodeType extends Ast.StarNode`
NodeType['type'] extends Ast.StarNode['type'] // If the selection is *
? Row
: NodeType['type'] extends Ast.SpreadNode['type'] // If the selection is a ...spread
? ProcessSpreadNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.SpreadNode>>
: NodeType['type'] extends Ast.FieldNode['type']
? ProcessFieldNode<Schema, Row, RelationName, Relationships, Extract<NodeType, Ast.FieldNode>>
: SelectQueryError<'Unsupported node type.'>
/**
* Processes a FieldNode and returns the resulting TypeScript type.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
type ProcessFieldNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field['children'] extends []
? {}
: IsNonEmptyArray<Field['children']> extends true // Has embedded resource?
? ProcessEmbeddedResource<Schema, Relationships, Field, RelationName>
: ProcessSimpleField<Row, RelationName, Field>
type ResolveJsonPathType<
Value,
Path extends string | undefined,
CastType extends PostgreSQLTypes
> = Path extends string
? JsonPathToType<Value, Path> extends never
? // Always fall back if JsonPathToType returns never
TypeScriptTypes<CastType>
: JsonPathToType<Value, Path> extends infer PathResult
? PathResult extends string
? // Use the result if it's a string as we know that even with the string accessor ->> it's a valid type
PathResult
: IsStringUnion<PathResult> extends true
? // Use the result if it's a union of strings
PathResult
: CastType extends 'json'
? // If the type is not a string, ensure it was accessed with json accessor ->
PathResult
: // Otherwise it means non-string value accessed with string accessor ->> use the TypeScriptTypes result
TypeScriptTypes<CastType>
: TypeScriptTypes<CastType>
: // No json path, use regular type casting
TypeScriptTypes<CastType>
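// Illustrative sketch, not part of the upstream source: assuming a JSONB column whose
// generated type is `{ theme: { mode: 'dark' | 'light' } }`, a selection like
// `settings->theme->>mode` resolves to the string-literal union rather than falling back
// to the cast type. The `_JsonPathExample*` aliases below are hypothetical.
type _JsonPathExampleValue = { theme: { mode: 'dark' | 'light' } }
type _JsonPathExampleResolved = ResolveJsonPathType<_JsonPathExampleValue, 'theme.mode', 'text'>
// => 'dark' | 'light' (a string union, so the JSON path result wins over the 'text' cast)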
/**
* Processes a simple field (without embedded resources).
*
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Field - The FieldNode to process.
*/
type ProcessSimpleField<
Row extends Record<string, unknown>,
RelationName extends string,
Field extends Ast.FieldNode
> = Field['name'] extends keyof Row | 'count'
? Field['aggregateFunction'] extends AggregateFunctions
? {
// An aggregate function always overrides the column name: id.sum() becomes sum,
// unless the field has been aliased
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? TypeScriptTypes<Field['castType']>
: number
}
: {
// Aliases override the property name in the result
[K in GetFieldNodeResultName<Field>]: Field['castType'] extends PostgreSQLTypes
? ResolveJsonPathType<Row[Field['name']], Field['jsonPath'], Field['castType']>
: Row[Field['name']]
}
: SelectQueryError<`column '${Field['name']}' does not exist on '${RelationName}'.`>
/**
* Processes an embedded resource (relation).
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Field - The FieldNode to process.
*/
export type ProcessEmbeddedResource<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveRelationship<Schema, Relationships, Field, CurrentTableOrView> extends infer Resolved
? Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
}
? ProcessEmbeddedResourceResult<Schema, Resolved, Field, CurrentTableOrView>
: // Otherwise Resolved is a SelectQueryError; return it as-is
{ [K in GetFieldNodeResultName<Field>]: Resolved }
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to resolve relationship.'> &
string
}
/**
* Helper type to process the result of an embedded resource.
*/
type ProcessEmbeddedResourceResult<
Schema extends GenericSchema,
Resolved extends {
referencedTable: Pick<GenericTable, 'Row' | 'Relationships'>
relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' }
direction: string
},
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema>
> = ProcessNodes<
Schema,
Resolved['referencedTable']['Row'],
Field['name'],
Resolved['referencedTable']['Relationships'],
Field['children'] extends undefined
? []
: Exclude<Field['children'], undefined> extends Ast.Node[]
? Exclude<Field['children'], undefined>
: []
> extends infer ProcessedChildren
? {
[K in GetFieldNodeResultName<Field>]: Resolved['direction'] extends 'forward'
? Field extends { innerJoin: true }
? Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren
: ProcessedChildren[]
: Resolved['relation']['isOneToOne'] extends true
? ProcessedChildren | null
: ProcessedChildren[]
: // If the relation is a self-reference it is always treated as a reverse relationship
Resolved['relation']['referencedRelation'] extends CurrentTableOrView
? // It can either be a reverse reference via a column inclusion (eg: parent_id(*)),
// in which case the result will be a single object
Resolved['relation']['match'] extends 'col'
? IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? ProcessedChildren | null
: ProcessedChildren
: // Or it can be a reference via the referenced relation (eg: collections(*)),
// in which case the result will be an array of all matching rows (all collections whose parent_id is the current id)
ProcessedChildren[]
: // Otherwise if it's a non self-reference reverse relationship it's a single object
IsRelationNullable<
TablesAndViews<Schema>[CurrentTableOrView],
Resolved['relation']
> extends true
? Field extends { innerJoin: true }
? ProcessedChildren
: ProcessedChildren | null
: ProcessedChildren
}
: {
[K in GetFieldNodeResultName<Field>]: SelectQueryError<'Failed to process embedded resource nodes.'> &
string
}
/**
* Processes a SpreadNode by processing its target node.
*
* @param Schema - Database schema.
* @param Row - The type of a row in the current table.
* @param RelationName - The name of the current table or view.
* @param Relationships - Relationships of the current table.
* @param Spread - The SpreadNode to process.
*/
type ProcessSpreadNode<
Schema extends GenericSchema,
Row extends Record<string, unknown>,
RelationName extends string,
Relationships extends GenericRelationship[],
Spread extends Ast.SpreadNode
> = ProcessNode<Schema, Row, RelationName, Relationships, Spread['target']> extends infer Result
? Result extends SelectQueryError<infer E>
? SelectQueryError<E>
: ExtractFirstProperty<Result> extends unknown[]
? {
[K in Spread['target']['name']]: SelectQueryError<`"${RelationName}" and "${Spread['target']['name']}" do not form a many-to-one or one-to-one relationship spread not possible`>
}
: ProcessSpreadNodeResult<Result>
: never
/**
* Helper type to process the result of a spread node.
*/
type ProcessSpreadNodeResult<Result> = Result extends Record<
string,
SelectQueryError<string> | null
>
? Result
: ExtractFirstProperty<Result> extends infer SpreadedObject
? ContainsNull<SpreadedObject> extends true
? Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] | null }, null>
: Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] }, null>
: SelectQueryError<'An error occurred spreading the object'>

115
node_modules/@supabase/postgrest-js/src/select-query-parser/types.ts generated vendored Normal file
View File

@@ -0,0 +1,115 @@
import type { GenericRelationship, GenericSchema, GenericTable, Prettify } from '../types'
export type { GenericRelationship, GenericSchema, GenericTable, Prettify }
export type AggregateWithoutColumnFunctions = 'count'
export type AggregateWithColumnFunctions =
| 'sum'
| 'avg'
| 'min'
| 'max'
| AggregateWithoutColumnFunctions
export type AggregateFunctions = AggregateWithColumnFunctions
export type Json =
| string
| number
| boolean
| null
| {
[key: string]: Json | undefined
}
| Json[]
type PostgresSQLNumberTypes = 'int2' | 'int4' | 'int8' | 'float4' | 'float8' | 'numeric'
type PostgresSQLStringTypes =
| 'bytea'
| 'bpchar'
| 'varchar'
| 'date'
| 'text'
| 'citext'
| 'time'
| 'timetz'
| 'timestamp'
| 'timestamptz'
| 'uuid'
| 'vector'
type SingleValuePostgreSQLTypes =
| PostgresSQLNumberTypes
| PostgresSQLStringTypes
| 'bool'
| 'json'
| 'jsonb'
| 'void'
| 'record'
| string
type ArrayPostgreSQLTypes = `_${SingleValuePostgreSQLTypes}`
type TypeScriptSingleValueTypes<T extends SingleValuePostgreSQLTypes> = T extends 'bool'
? boolean
: T extends PostgresSQLNumberTypes
? number
: T extends PostgresSQLStringTypes
? string
: T extends 'json' | 'jsonb'
? Json
: T extends 'void'
? undefined
: T extends 'record'
? Record<string, unknown>
: unknown
type StripUnderscore<T extends string> = T extends `_${infer U}` ? U : T
// Represents all possible PostgreSQL types, including array types; the 'string' member of the union allows for custom types
export type PostgreSQLTypes = SingleValuePostgreSQLTypes | ArrayPostgreSQLTypes
// Helper type to convert PostgreSQL types to their TypeScript equivalents
export type TypeScriptTypes<T extends PostgreSQLTypes> = T extends ArrayPostgreSQLTypes
? TypeScriptSingleValueTypes<StripUnderscore<Extract<T, SingleValuePostgreSQLTypes>>>[]
: TypeScriptSingleValueTypes<T>
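// Illustrative sketch, not part of the upstream source: how PostgreSQL type names map to
// TypeScript types, including the `_`-prefixed array variants and the custom-type fallthrough.
type _ExampleInt = TypeScriptTypes<'int4'> // number
type _ExampleTextArray = TypeScriptTypes<'_text'> // string[]
type _ExampleCustomType = TypeScriptTypes<'my_custom_enum'> // unknown (custom types fall through)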
// Utility types for working with unions
export type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (
k: infer I
) => void
? I
: never
export type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R
? R
: never
export type Push<T extends any[], V> = [...T, V]
// Converts a union type to a tuple type
export type UnionToTuple<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = N extends true
? []
: Push<UnionToTuple<Exclude<T, L>>, L>
export type UnionToArray<T> = UnionToTuple<T>
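// Illustrative sketch, not part of the upstream source: the union helpers in action. Note that
// the element order produced by UnionToTuple is a compiler implementation detail and should not
// be relied upon.
type _ExampleIntersection = UnionToIntersection<{ a: 1 } | { b: 2 }> // { a: 1 } & { b: 2 }
type _ExampleTuple = UnionToTuple<'insert' | 'update'> // ['insert', 'update'] (order not guaranteed)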
// Extracts the type of the first property in an object type
export type ExtractFirstProperty<T> = T extends { [K in keyof T]: infer U } ? U : never
// Type predicates
export type ContainsNull<T> = null extends T ? true : false
export type IsNonEmptyArray<T> = Exclude<T, undefined> extends readonly [unknown, ...unknown[]]
? true
: false
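// Illustrative sketch, not part of the upstream source: the two predicates used throughout the
// parser to detect nullable columns and non-empty children arrays.
type _ExampleNullable = ContainsNull<string | null> // true
type _ExampleNotNullable = ContainsNull<string> // false
type _ExampleHasChildren = IsNonEmptyArray<[{ type: 'field' }]> // true
type _ExampleNoChildren = IsNonEmptyArray<[] | undefined> // false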
// Types for working with database schemas
export type TablesAndViews<Schema extends GenericSchema> = Schema['Tables'] &
Exclude<Schema['Views'], ''>
export type GetTableRelationships<
Schema extends GenericSchema,
Tname extends string
> = TablesAndViews<Schema>[Tname] extends { Relationships: infer R } ? R : false

580
node_modules/@supabase/postgrest-js/src/select-query-parser/utils.ts generated vendored Normal file
View File

@@ -0,0 +1,580 @@
import { Ast } from './parser'
import {
AggregateFunctions,
ContainsNull,
GenericRelationship,
GenericSchema,
GenericTable,
IsNonEmptyArray,
TablesAndViews,
UnionToArray,
} from './types'
export type IsAny<T> = 0 extends 1 & T ? true : false
export type SelectQueryError<Message extends string> = { error: true } & Message
/*
** Because of pg-meta types generation there are cases where the same relationship can be duplicated
** if the relation spans schemas and views. This ensures that we dedupe those relations and treat them
** as postgrest would.
** This is no longer the case and has been patched here: https://github.com/supabase/postgres-meta/pull/809
** But we still need this for retro-compatibility with older generated types
** TODO: Remove this in next major version
*/
export type DeduplicateRelationships<T extends readonly unknown[]> = T extends readonly [
infer First,
...infer Rest
]
? First extends Rest[number]
? DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>
: [First, ...DeduplicateRelationships<Rest extends readonly unknown[] ? Rest : []>]
: T
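// Illustrative sketch, not part of the upstream source: duplicate entries are dropped so a
// relationship that older generated types emit twice is only considered once.
type _ExampleDeduplicated = DeduplicateRelationships<['fk_a', 'fk_a', 'fk_b']> // ['fk_a', 'fk_b']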
export type GetFieldNodeResultName<Field extends Ast.FieldNode> = Field['alias'] extends string
? Field['alias']
: Field['aggregateFunction'] extends AggregateFunctions
? Field['aggregateFunction']
: Field['name']
type FilterRelationNodes<Nodes extends Ast.Node[]> = UnionToArray<
{
[K in keyof Nodes]: Nodes[K] extends Ast.SpreadNode
? Nodes[K]['target']
: Nodes[K] extends Ast.FieldNode
? IsNonEmptyArray<Nodes[K]['children']> extends true
? Nodes[K]
: never
: never
}[number]
>
type ResolveRelationships<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.FieldNode[]
> = UnionToArray<{
[K in keyof Nodes]: Nodes[K] extends Ast.FieldNode
? ResolveRelationship<Schema, Relationships, Nodes[K], RelationName> extends infer Relation
? Relation extends {
relation: {
referencedRelation: string
foreignKeyName: string
match: string
}
from: string
}
? {
referencedTable: Relation['relation']['referencedRelation']
fkName: Relation['relation']['foreignKeyName']
from: Relation['from']
match: Relation['relation']['match']
fieldName: GetFieldNodeResultName<Nodes[K]>
}
: Relation
: never
: never
}>[0]
/**
* Checks if a relation is implicitly referenced twice, requiring disambiguation
*/
type IsDoubleReference<T, U> = T extends {
referencedTable: infer RT
fieldName: infer FN
match: infer M
}
? M extends 'col' | 'refrel'
? U extends { referencedTable: RT; fieldName: FN; match: M }
? true
: false
: false
: false
/**
* Compares one element with all other elements in the array to find duplicates
*/
type CheckDuplicates<Arr extends any[], Current> = Arr extends [infer Head, ...infer Tail]
? IsDoubleReference<Current, Head> extends true
? Head | CheckDuplicates<Tail, Current> // Return the Head if duplicate
: CheckDuplicates<Tail, Current> // Otherwise, continue checking
: never
/**
* Iterates over the elements of the array to find duplicates
*/
type FindDuplicatesWithinDeduplicated<Arr extends any[]> = Arr extends [infer Head, ...infer Tail]
? CheckDuplicates<Tail, Head> | FindDuplicatesWithinDeduplicated<Tail>
: never
type FindDuplicates<Arr extends any[]> = FindDuplicatesWithinDeduplicated<
DeduplicateRelationships<Arr>
>
export type CheckDuplicateEmbededReference<
Schema extends GenericSchema,
RelationName extends string,
Relationships extends GenericRelationship[],
Nodes extends Ast.Node[]
> = FilterRelationNodes<Nodes> extends infer RelationsNodes
? RelationsNodes extends Ast.FieldNode[]
? ResolveRelationships<
Schema,
RelationName,
Relationships,
RelationsNodes
> extends infer ResolvedRels
? ResolvedRels extends unknown[]
? FindDuplicates<ResolvedRels> extends infer Duplicates
? Duplicates extends never
? false
: Duplicates extends { fieldName: infer FieldName }
? FieldName extends string
? {
[K in FieldName]: SelectQueryError<`table "${RelationName}" specified more than once use hinting for desambiguation`>
}
: false
: false
: false
: false
: false
: false
: false
/**
* Returns a boolean representing whether there is a foreign key referencing
* a given relation.
*/
type HasFKeyToFRel<FRelName, Relationships> = Relationships extends [infer R]
? R extends { referencedRelation: FRelName }
? true
: false
: Relationships extends [infer R, ...infer Rest]
? HasFKeyToFRel<FRelName, [R]> extends true
? true
: HasFKeyToFRel<FRelName, Rest>
: false
/**
* Checks if there is more than one relation to a given foreign relation name in the Relationships.
*/
type HasMultipleFKeysToFRelDeduplicated<FRelName, Relationships> = Relationships extends [
infer R,
...infer Rest
]
? R extends { referencedRelation: FRelName }
? HasFKeyToFRel<FRelName, Rest> extends true
? true
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: HasMultipleFKeysToFRelDeduplicated<FRelName, Rest>
: false
type HasMultipleFKeysToFRel<
FRelName,
Relationships extends unknown[]
> = HasMultipleFKeysToFRelDeduplicated<FRelName, DeduplicateRelationships<Relationships>>
type CheckRelationshipError<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FoundRelation
> = FoundRelation extends SelectQueryError<string>
? FoundRelation
: // If the relation is a reverse relation with no hint (matching by name)
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'reverse'
}
? RelatedRelationName extends string
? // We check if there is possible confusion with other relations with this table
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
? // If there is, postgrest will fail at runtime and require disambiguation via hinting
SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: FoundRelation
: never
: // Same check for forward relationships, but we must gather the relationships from the found relation
FoundRelation extends {
relation: {
referencedRelation: infer RelatedRelationName
name: string
}
direction: 'forward'
from: infer From
}
? RelatedRelationName extends string
? From extends keyof TablesAndViews<Schema> & string
? HasMultipleFKeysToFRel<
RelatedRelationName,
TablesAndViews<Schema>[From]['Relationships']
> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${From}' and '${RelatedRelationName}' you need to hint the column with ${From}!<columnName> ?`>
: FoundRelation
: never
: never
: FoundRelation
/**
* Resolves relationships for embedded resources and retrieves the referenced Table
*/
export type ResolveRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = ResolveReverseRelationship<
Schema,
Relationships,
Field,
CurrentTableOrView
> extends infer ReverseRelationship
? ReverseRelationship extends false
? CheckRelationshipError<
Schema,
Relationships,
CurrentTableOrView,
ResolveForwardRelationship<Schema, Field, CurrentTableOrView>
>
: CheckRelationshipError<Schema, Relationships, CurrentTableOrView, ReverseRelationship>
: never
/**
* Resolves reverse relationships (from children to parent)
*/
type ResolveReverseRelationship<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<Schema, Relationships, Field> extends infer FoundRelation
? FoundRelation extends never
? false
: FoundRelation extends { referencedRelation: infer RelatedRelationName }
? RelatedRelationName extends string
? RelatedRelationName extends keyof TablesAndViews<Schema>
? // If the relation was found via hinting we just return it without any more checks
FoundRelation extends { hint: string }
? {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
: // If the relation was found via implicit relation naming, we must ensure there are no conflicting matches
HasMultipleFKeysToFRel<RelatedRelationName, Relationships> extends true
? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}!<columnName> ?`>
: {
referencedTable: TablesAndViews<Schema>[RelatedRelationName]
relation: FoundRelation
direction: 'reverse'
from: CurrentTableOrView
}
: SelectQueryError<`Relation '${RelatedRelationName}' not found in schema.`>
: false
: false
: false
export type FindMatchingTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Tables']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingTableRelationships<Schema, Rest, value>
: FindMatchingTableRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
value extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends keyof Schema['Views']
? R extends { foreignKeyName: value }
? R & { match: 'fkname' }
: R extends { referencedRelation: value }
? R & { match: 'refrel' }
: R extends { columns: [value] }
? R & { match: 'col' }
: FindMatchingViewRelationships<Schema, Rest, value>
: FindMatchingViewRelationships<Schema, Rest, value>
: false
: false
: false
export type FindMatchingHintTableRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: FindMatchingHintTableRelationships<Schema, Rest, hint, name>
: false
: false
: false
export type FindMatchingHintViewRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
hint extends string,
name extends string
> = Relationships extends [infer R, ...infer Rest]
? Rest extends GenericRelationship[]
? R extends { referencedRelation: infer ReferencedRelation }
? ReferencedRelation extends name
? R extends { foreignKeyName: hint }
? R & { match: 'fkname' }
: R extends { referencedRelation: hint }
? R & { match: 'refrel' }
: R extends { columns: [hint] }
? R & { match: 'col' }
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: FindMatchingHintViewRelationships<Schema, Rest, hint, name>
: false
: false
: false
type IsColumnsNullable<
Table extends Pick<GenericTable, 'Row'>,
Columns extends (keyof Table['Row'])[]
> = Columns extends [infer Column, ...infer Rest]
? Column extends keyof Table['Row']
? ContainsNull<Table['Row'][Column]> extends true
? true
: IsColumnsNullable<Table, Rest extends (keyof Table['Row'])[] ? Rest : []>
: false
: false
// Check whether or not a 1-1 relation is nullable by checking the types of its columns
export type IsRelationNullable<
Table extends GenericTable,
Relation extends GenericRelationship
> = IsColumnsNullable<Table, Relation['columns']>
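// Illustrative sketch, not part of the upstream source: a relation is considered nullable when
// one of the local columns backing the foreign key is itself nullable. The table and relationship
// shapes below are hypothetical.
type _ExamplePostsTable = {
  Row: { id: number; author_id: number | null }
  Insert: { id?: number; author_id?: number | null }
  Update: { id?: number; author_id?: number | null }
  Relationships: []
}
type _ExampleAuthorRelation = {
  foreignKeyName: 'posts_author_id_fkey'
  columns: ['author_id']
  isOneToOne: false
  referencedRelation: 'authors'
  referencedColumns: ['id']
}
type _ExampleNullableRelation = IsRelationNullable<_ExamplePostsTable, _ExampleAuthorRelation> // true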
type TableForwardRelationships<
Schema extends GenericSchema,
TName
> = TName extends keyof TablesAndViews<Schema>
? UnionToArray<
RecursivelyFindRelationships<Schema, TName, keyof TablesAndViews<Schema>>
> extends infer R
? R extends (GenericRelationship & { from: keyof TablesAndViews<Schema> })[]
? R
: []
: []
: []
type RecursivelyFindRelationships<
Schema extends GenericSchema,
TName,
Keys extends keyof TablesAndViews<Schema>
> = Keys extends infer K
? K extends keyof TablesAndViews<Schema>
? FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K> extends never
? RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
:
| FilterRelationships<TablesAndViews<Schema>[K]['Relationships'], TName, K>
| RecursivelyFindRelationships<Schema, TName, Exclude<Keys, K>>
: false
: false
type FilterRelationships<R, TName, From> = R extends readonly (infer Rel)[]
? Rel extends { referencedRelation: TName }
? Rel & { from: From }
: never
: never
export type ResolveForwardRelationship<
Schema extends GenericSchema,
Field extends Ast.FieldNode,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string
> = FindFieldMatchingRelationships<
Schema,
TablesAndViews<Schema>[Field['name']]['Relationships'],
Ast.FieldNode & { name: CurrentTableOrView; hint: Field['hint'] }
> extends infer FoundByName
? FoundByName extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[Field['name']]
relation: FoundByName
direction: 'forward'
from: Field['name']
type: 'found-by-name'
}
: FindFieldMatchingRelationships<
Schema,
TableForwardRelationships<Schema, CurrentTableOrView>,
Field
> extends infer FoundByMatch
? FoundByMatch extends GenericRelationship & {
from: keyof TablesAndViews<Schema>
}
? {
referencedTable: TablesAndViews<Schema>[FoundByMatch['from']]
relation: FoundByMatch
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-match'
}
: FindJoinTableRelationship<
Schema,
CurrentTableOrView,
Field['name']
> extends infer FoundByJoinTable
? FoundByJoinTable extends GenericRelationship
? {
referencedTable: TablesAndViews<Schema>[FoundByJoinTable['referencedRelation']]
relation: FoundByJoinTable & { match: 'refrel' }
direction: 'forward'
from: CurrentTableOrView
type: 'found-by-join-table'
}
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
: SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field['name']}`>
/**
* Given a CurrentTableOrView, finds all join tables to this relation.
* For example, if products and categories are linked via product_categories table:
*
* @example
* Given:
* - CurrentTableView = 'products'
* - FieldName = "categories"
*
* It should return this relationship from product_categories:
* {
* foreignKeyName: "product_categories_category_id_fkey",
* columns: ["category_id"],
* isOneToOne: false,
* referencedRelation: "categories",
* referencedColumns: ["id"]
* }
*/
type ResolveJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = {
[TableName in keyof TablesAndViews<Schema>]: DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer Rel)[]
? Rel extends { referencedRelation: CurrentTableOrView }
? DeduplicateRelationships<
TablesAndViews<Schema>[TableName]['Relationships']
> extends readonly (infer OtherRel)[]
? OtherRel extends { referencedRelation: FieldName }
? OtherRel
: never
: never
: never
: never
}[keyof TablesAndViews<Schema>]
export type FindJoinTableRelationship<
Schema extends GenericSchema,
CurrentTableOrView extends keyof TablesAndViews<Schema> & string,
FieldName extends string
> = ResolveJoinTableRelationship<Schema, CurrentTableOrView, FieldName> extends infer Result
? [Result] extends [never]
? false
: Result
: never
/**
* Finds a matching relationship based on the FieldNode's name and optional hint.
*/
export type FindFieldMatchingRelationships<
Schema extends GenericSchema,
Relationships extends GenericRelationship[],
Field extends Ast.FieldNode
> = Field extends { hint: string }
? FindMatchingHintTableRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintTableRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-table-via-hint'
hint: Field['hint']
}
: FindMatchingHintViewRelationships<
Schema,
Relationships,
Field['hint'],
Field['name']
> extends GenericRelationship
? FindMatchingHintViewRelationships<Schema, Relationships, Field['hint'], Field['name']> & {
branch: 'found-in-view-via-hint'
hint: Field['hint']
}
: SelectQueryError<'Failed to find matching relation via hint'>
: FindMatchingTableRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingTableRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-table-via-name'
name: Field['name']
}
: FindMatchingViewRelationships<Schema, Relationships, Field['name']> extends GenericRelationship
? FindMatchingViewRelationships<Schema, Relationships, Field['name']> & {
branch: 'found-in-view-via-name'
name: Field['name']
}
: SelectQueryError<'Failed to find matching relation via name'>
export type JsonPathToAccessor<Path extends string> = Path extends `${infer P1}->${infer P2}`
? P2 extends `>${infer Rest}` // Handle ->> operator
? JsonPathToAccessor<`${P1}.${Rest}`>
: P2 extends string // Handle -> operator
? JsonPathToAccessor<`${P1}.${P2}`>
: Path
: Path extends `>${infer Rest}` // Clean up any remaining > characters
? JsonPathToAccessor<Rest>
: Path extends `${infer P1}::${infer _}` // Handle type casting
? JsonPathToAccessor<P1>
: Path extends `${infer P1}${')' | ','}${infer _}` // Handle closing parenthesis and comma
? P1
: Path
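// Illustrative sketch, not part of the upstream source: the PostgREST JSON operators are rewritten
// into a dot-separated accessor path before being walked by JsonPathToType.
type _ExampleAccessor = JsonPathToAccessor<'address->city->>name'> // 'address.city.name'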
export type JsonPathToType<T, Path extends string> = Path extends ''
? T
: ContainsNull<T> extends true
? JsonPathToType<Exclude<T, null>, Path>
: Path extends `${infer Key}.${infer Rest}`
? Key extends keyof T
? JsonPathToType<T[Key], Rest>
: never
: Path extends keyof T
? T[Path]
: never
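// Illustrative sketch, not part of the upstream source: walking a dot-separated path into a
// (possibly nullable) JSON value type; unknown keys resolve to never.
type _ExampleJsonValue = { address: { city: { name: string } } } | null
type _ExampleJsonPathHit = JsonPathToType<_ExampleJsonValue, 'address.city.name'> // string
type _ExampleJsonPathMiss = JsonPathToType<_ExampleJsonValue, 'address.zipcode'> // never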
export type IsStringUnion<T> = string extends T
? false
: T extends string
? [T] extends [never]
? false
: true
: false
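// Illustrative sketch, not part of the upstream source: IsStringUnion is true for unions of string
// literals and false for the broad `string` type, which is how ResolveJsonPathType decides whether
// a ->> access still yields a precise literal type.
type _ExampleLiteralUnion = IsStringUnion<'dark' | 'light'> // true
type _ExampleWideString = IsStringUnion<string> // false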

188
node_modules/@supabase/postgrest-js/src/types.ts generated vendored Normal file
View File

@@ -0,0 +1,188 @@
import PostgrestError from './PostgrestError'
import { ContainsNull } from './select-query-parser/types'
import { SelectQueryError } from './select-query-parser/utils'
export type Fetch = typeof fetch
/**
* Response format
*
* {@link https://github.com/supabase/supabase-js/issues/32}
*/
interface PostgrestResponseBase {
status: number
statusText: string
}
export interface PostgrestResponseSuccess<T> extends PostgrestResponseBase {
error: null
data: T
count: number | null
}
export interface PostgrestResponseFailure extends PostgrestResponseBase {
error: PostgrestError
data: null
count: null
}
// TODO: in v3:
// - remove PostgrestResponse and PostgrestMaybeSingleResponse
// - rename PostgrestSingleResponse to PostgrestResponse
export type PostgrestSingleResponse<T> = PostgrestResponseSuccess<T> | PostgrestResponseFailure
export type PostgrestMaybeSingleResponse<T> = PostgrestSingleResponse<T | null>
export type PostgrestResponse<T> = PostgrestSingleResponse<T[]>
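// Illustrative sketch, not part of the upstream source: because `error` is null on success and
// `data` is null on failure, checking one field narrows the whole response. The `_unwrap` helper
// below is hypothetical.
function _unwrap<T>(response: PostgrestSingleResponse<T>): T {
  if (response.error !== null) {
    // Narrowed to PostgrestResponseFailure: `data` is null and `error` is a PostgrestError
    throw response.error
  }
  // Narrowed to PostgrestResponseSuccess<T>: `data` is T and `error` is null
  return response.data
}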
export type GenericRelationship = {
foreignKeyName: string
columns: string[]
isOneToOne?: boolean
referencedRelation: string
referencedColumns: string[]
}
export type GenericTable = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericUpdatableView = {
Row: Record<string, unknown>
Insert: Record<string, unknown>
Update: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericNonUpdatableView = {
Row: Record<string, unknown>
Relationships: GenericRelationship[]
}
export type GenericView = GenericUpdatableView | GenericNonUpdatableView
export type GenericFunction = {
Args: Record<string, unknown>
Returns: unknown
}
export type GenericSchema = {
Tables: Record<string, GenericTable>
Views: Record<string, GenericView>
Functions: Record<string, GenericFunction>
}
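// Illustrative sketch, not part of the upstream source: the shape that generated database types
// (for example from the Supabase type generator) must satisfy. All names below are hypothetical.
type _ExampleSchema = {
  Tables: {
    posts: {
      Row: { id: number; title: string }
      Insert: { id?: number; title: string }
      Update: { id?: number; title?: string }
      Relationships: []
    }
  }
  Views: {}
  Functions: {}
}
type _ExampleSchemaIsValid = _ExampleSchema extends GenericSchema ? true : false // true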
// https://twitter.com/mattpocockuk/status/1622730173446557697
export type Prettify<T> = { [K in keyof T]: T[K] } & {}
// https://github.com/sindresorhus/type-fest
export type SimplifyDeep<Type, ExcludeType = never> = ConditionalSimplifyDeep<
Type,
ExcludeType | NonRecursiveType | Set<unknown> | Map<unknown, unknown>,
object
>
type ConditionalSimplifyDeep<
Type,
ExcludeType = never,
IncludeType = unknown
> = Type extends ExcludeType
? Type
: Type extends IncludeType
? { [TypeKey in keyof Type]: ConditionalSimplifyDeep<Type[TypeKey], ExcludeType, IncludeType> }
: Type
type NonRecursiveType = BuiltIns | Function | (new (...arguments_: any[]) => unknown)
type BuiltIns = Primitive | void | Date | RegExp
type Primitive = null | undefined | string | number | boolean | symbol | bigint
export type IsValidResultOverride<Result, NewResult, ErrorResult, ErrorNewResult> =
Result extends any[]
? NewResult extends any[]
? // Both are arrays - valid
true
: ErrorResult
: NewResult extends any[]
? ErrorNewResult
: // Neither are arrays - valid
true
/**
* Utility type to check if array types match between Result and NewResult.
* Returns either the valid NewResult type or an error message type.
*/
export type CheckMatchingArrayTypes<Result, NewResult> =
// If the result is a QueryError we allow the user to override anyway
Result extends SelectQueryError<string>
? NewResult
: IsValidResultOverride<
Result,
NewResult,
{
Error: 'Type mismatch: Cannot cast array result to a single object. Use .overrideTypes<Array<YourType>> or .returns<Array<YourType>> (deprecated) for array results or .single() to convert the result to a single object'
},
{
Error: 'Type mismatch: Cannot cast single object to array type. Remove Array wrapper from return type or make sure you are not using .single() up in the calling chain'
}
> extends infer ValidationResult
? ValidationResult extends true
? // Preserve the optionality of the result if the overridden type is an object (case of chaining with `maybeSingle`)
ContainsNull<Result> extends true
? NewResult | null
: NewResult
: // contains the error
ValidationResult
: never
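// Illustrative sketch, not part of the upstream source: overriding an array result with another
// array type is accepted, while mixing array and single-object shapes surfaces the dedicated
// error object instead.
type _ExampleValidOverride = CheckMatchingArrayTypes<{ id: number }[], { id: string }[]>
// => { id: string }[]
type _ExampleInvalidOverride = CheckMatchingArrayTypes<{ id: number }[], { id: string }>
// => { Error: 'Type mismatch: Cannot cast array result to a single object. ...' }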
type Simplify<T> = T extends object ? { [K in keyof T]: T[K] } : T
// Extract only explicit (non-index-signature) keys.
type ExplicitKeys<T> = {
[K in keyof T]: string extends K ? never : K
}[keyof T]
type MergeExplicit<New, Row> = {
// We merge all the explicit keys which allows merge and override of types like
// { [key: string]: unknown } and { someSpecificKey: boolean }
[K in ExplicitKeys<New> | ExplicitKeys<Row>]: K extends keyof New
? K extends keyof Row
? Row[K] extends SelectQueryError<string>
? New[K]
: // Check if the override is on an embedded relation (array)
New[K] extends any[]
? Row[K] extends any[]
? Array<Simplify<MergeDeep<NonNullable<New[K][number]>, NonNullable<Row[K][number]>>>>
: New[K]
: // Check if both properties are objects omitting a potential null union
IsPlainObject<NonNullable<New[K]>> extends true
? IsPlainObject<NonNullable<Row[K]>> extends true
? // If they are, use the new override as source of truth for the optionality
ContainsNull<New[K]> extends true
? // If the override wants to preserve optionality
Simplify<MergeDeep<NonNullable<New[K]>, NonNullable<Row[K]>>> | null
: // If the override wants to enforce non-null result
Simplify<MergeDeep<New[K], NonNullable<Row[K]>>>
: New[K] // Override with New type if Row isn't an object
: New[K] // Override primitives with New type
: New[K] // Add new properties from New
: K extends keyof Row
? Row[K] // Keep existing properties not in New
: never
}
type MergeDeep<New, Row> = Simplify<
MergeExplicit<New, Row> &
// Intersection here is to restore dynamic keys into the merging result
// eg:
// {[key: number]: string}
// or Record<string, number | null>
(string extends keyof Row ? { [K: string]: Row[string] } : {})
>
// Helper to check if a type is a plain object (not an array)
type IsPlainObject<T> = T extends any[] ? false : T extends object ? true : false
// Merge the new result with the original (Result) when merge option is true.
// If NewResult is an array, merge each element.
export type MergePartialResult<NewResult, Result, Options> = Options extends { merge: true }
? Result extends any[]
? NewResult extends any[]
? Array<Simplify<MergeDeep<NewResult[number], Result[number]>>>
: never
: Simplify<MergeDeep<NewResult, Result>>
: NewResult
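// Illustrative sketch, not part of the upstream source: with `{ merge: true }` the override is
// deep-merged into the original row type instead of replacing it.
type _ExampleOriginalRow = { id: number; metadata: { tags: unknown } }
type _ExampleOverride = { metadata: { tags: string[] } }
type _ExampleMergedRow = MergePartialResult<_ExampleOverride, _ExampleOriginalRow, { merge: true }>
// => { id: number; metadata: { tags: string[] } }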

1
node_modules/@supabase/postgrest-js/src/version.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
export const version = '1.19.4'