Skip to content

Commit

Permalink
Fix compatibility with Prisma v5.15+
Browse files Browse the repository at this point in the history
  • Loading branch information
exAspArk committed Aug 2, 2024
1 parent 8fd33e9 commit 5a667b7
Show file tree
Hide file tree
Showing 5 changed files with 122 additions and 87 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# Changelog

#### [v0.4.1](https://github.com/BemiHQ/bemi-prisma/compare/v0.4.0...v0.4.1) - 2024-08-02

- Fix compatibility with Prisma v5.15+

#### [v0.4.0](https://github.com/BemiHQ/bemi-prisma/compare/v0.3.0...v0.4.0) - 2024-04-16

- Fix Next.js actions by removing `@prisma/internals` as a dependency
Expand Down
5 changes: 5 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
build:
pnpm run build

publish:
npm publish --access public
6 changes: 3 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@bemi-db/prisma",
"version": "0.4.1",
"description": "Automatic data change tracking for Prisma",
"main": "dist/index.js",
"module": "./dist/index.mjs",
Expand All @@ -22,14 +22,14 @@
},
"homepage": "https://github.com/BemiHQ/bemi-prisma#readme",
"dependencies": {
"@prisma/driver-adapter-utils": "~5.15.0",
"commander": "^11.1.0",
"kleur": "^4.1.5",
"pg": "^8.11.5",
"postgres-array": "^3.0.2"
},
"peerDependencies": {
"@prisma/client": "^5.15.0"
},
"devDependencies": {
"@types/express": "^4.17.21",
Expand Down
158 changes: 77 additions & 81 deletions src/conversion.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
// Last SHA: 4048b4add3aa2695a00db6e83f577b8abaaafab1
// Last commit: https://github.com/prisma/prisma/commit/b36b73c2c349f80dc456dfd321f3c4cdc2aacfed#diff-848cf139de14ecb4c49c2e3f52703fecef4789f51052651f22ca2c5caf0afe99

import { type ColumnType, ColumnTypeEnum } from '@prisma/driver-adapter-utils'
// @ts-ignore: this is used to avoid the `Module '"<path>/node_modules/@types/pg/index"' has no default export.` error.
import pg from 'pg'
import { parse as parseArray } from 'postgres-array'

const { types } = pg
// Only `getTypeParser` is taken from pg: parsers are no longer registered
// globally via `setTypeParser`; they are supplied per query (see customParsers).
const { builtins: ScalarColumnType, getTypeParser } = types

/**
* PostgreSQL array column types (not defined in ScalarColumnType).
Expand Down Expand Up @@ -175,45 +175,45 @@ export class UnsupportedNativeDataType extends Error {
*/
export function fieldToColumnType(fieldTypeId: number): ColumnType {
switch (fieldTypeId) {
case ScalarColumnType['INT2']:
case ScalarColumnType['INT4']:
case ScalarColumnType.INT2:
case ScalarColumnType.INT4:
return ColumnTypeEnum.Int32
case ScalarColumnType['INT8']:
case ScalarColumnType.INT8:
return ColumnTypeEnum.Int64
case ScalarColumnType['FLOAT4']:
case ScalarColumnType.FLOAT4:
return ColumnTypeEnum.Float
case ScalarColumnType['FLOAT8']:
case ScalarColumnType.FLOAT8:
return ColumnTypeEnum.Double
case ScalarColumnType['BOOL']:
case ScalarColumnType.BOOL:
return ColumnTypeEnum.Boolean
case ScalarColumnType['DATE']:
case ScalarColumnType.DATE:
return ColumnTypeEnum.Date
case ScalarColumnType['TIME']:
case ScalarColumnType['TIMETZ']:
case ScalarColumnType.TIME:
case ScalarColumnType.TIMETZ:
return ColumnTypeEnum.Time
case ScalarColumnType['TIMESTAMP']:
case ScalarColumnType['TIMESTAMPTZ']:
case ScalarColumnType.TIMESTAMP:
case ScalarColumnType.TIMESTAMPTZ:
return ColumnTypeEnum.DateTime
case ScalarColumnType['NUMERIC']:
case ScalarColumnType['MONEY']:
case ScalarColumnType.NUMERIC:
case ScalarColumnType.MONEY:
return ColumnTypeEnum.Numeric
case ScalarColumnType['JSON']:
case ScalarColumnType['JSONB']:
case ScalarColumnType.JSON:
case ScalarColumnType.JSONB:
return ColumnTypeEnum.Json
case ScalarColumnType['UUID']:
case ScalarColumnType.UUID:
return ColumnTypeEnum.Uuid
case ScalarColumnType['OID']:
case ScalarColumnType.OID:
return ColumnTypeEnum.Int64
case ScalarColumnType['BPCHAR']:
case ScalarColumnType['TEXT']:
case ScalarColumnType['VARCHAR']:
case ScalarColumnType['BIT']:
case ScalarColumnType['VARBIT']:
case ScalarColumnType['INET']:
case ScalarColumnType['CIDR']:
case ScalarColumnType['XML']:
case ScalarColumnType.BPCHAR:
case ScalarColumnType.TEXT:
case ScalarColumnType.VARCHAR:
case ScalarColumnType.BIT:
case ScalarColumnType.VARBIT:
case ScalarColumnType.INET:
case ScalarColumnType.CIDR:
case ScalarColumnType.XML:
return ColumnTypeEnum.Text
case ScalarColumnType['BYTEA']:
case ScalarColumnType.BYTEA:
return ColumnTypeEnum.Bytes
case ArrayColumnType.INT2_ARRAY:
case ArrayColumnType.INT4_ARRAY:
Expand Down Expand Up @@ -280,17 +280,23 @@ function normalize_numeric(numeric: string): string {
return numeric
}

/****************************/
/* Time-related data-types */
/****************************/

/*
 * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD
 */

// Passthrough: pg already delivers DATE values as `YYYY-MM-DD` text, and a
// custom parser must exist so pg's default parser does not build a JS Date.
function normalize_date(date: string): string {
  return date
}

/*
* TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format
* ex: 1996-12-19T16:39:57-08:00
*/

// Passthrough: TIMESTAMP text is forwarded verbatim; the engine parses the
// rfc3339-style string itself, so no conversion happens on the JS side.
function normalize_timestamp(time: string): string {
  return time
}
Expand All @@ -313,25 +319,6 @@ function normalize_timez(time: string): string {
return time.split('+')[0]
}

setTypeParser(ScalarColumnType.TIME, normalize_time)
setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time))
setTypeParser(ScalarColumnType.TIMETZ, normalize_timez)

/*
* DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD
*/

setTypeParser(ScalarColumnType.DATE, normalize_date)
setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date))

/*
* TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format
* ex: 1996-12-19T16:39:57-08:00
*/
setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp)
setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp))
setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz)

/******************/
/* Money handling */
/******************/
Expand All @@ -340,30 +327,19 @@ function normalize_money(money: string): string {
return money.slice(1)
}

setTypeParser(ScalarColumnType.MONEY, normalize_money)
setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money))

/*****************/
/* JSON handling */
/*****************/

/**
 * JSON, JSONB - we hand off JSON handling entirely to the engines, so we
 * keep the value stringified here. This function needs to exist as
 * otherwise pg's default type parser would JSON.parse the column value.
 */
function toJson(json: string): unknown {
  return json
}

/************************/
/* Binary data handling */
/************************/
Expand All @@ -384,37 +360,57 @@ function encodeBuffer(buffer: Buffer) {
*/

const parsePgBytes = getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer
/**
* Convert bytes to a JSON-encodable representation since we can't
* currently send a parsed Buffer or ArrayBuffer across JS to Rust
* boundary.
*/
function convertBytes(serializedBytes: string): number[] {
const buffer = parsePgBytes(serializedBytes)
return encodeBuffer(buffer)
}

setTypeParser(ScalarColumnType.BYTEA, convertBytes)

/*
* BYTEA_ARRAY - arrays of arbitrary raw binary strings
*/

const parseBytesArray = getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[]

setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => {

// PATCH: Fix TypeScript errors
function normalizeByteaArray(serializedBytesArray: any) {
// PATCH: end
const buffers = parseBytesArray(serializedBytesArray)
return buffers.map((buf) => (buf ? encodeBuffer(buf) : null))
})
}

/**
* Convert bytes to a JSON-encodable representation since we can't
* currently send a parsed Buffer or ArrayBuffer across JS to Rust
* boundary.
*/
function convertBytes(serializedBytes: string): number[] {
const buffer = parsePgBytes(serializedBytes)
return encodeBuffer(buffer)
}

/* BIT_ARRAY, VARBIT_ARRAY */

// Passthrough for BIT / VARBIT array elements: the bit-string text from pg
// is returned exactly as received.
function normalizeBit(bit: string): string {
  return bit
}

/**
 * Custom pg type parsers keyed by PostgreSQL type OID.
 *
 * These are no longer registered globally with `setTypeParser` (which would
 * mutate parsing for every pg client in the process); instead the adapter
 * looks them up per query through its `types.getTypeParser` override (see
 * pg-adapter.ts), falling back to pg's defaults for any OID not listed here.
 */
export const customParsers = {
  [ScalarColumnType.NUMERIC]: normalize_numeric,
  [ArrayColumnType.NUMERIC_ARRAY]: normalize_array(normalize_numeric),
  [ScalarColumnType.TIME]: normalize_time,
  [ArrayColumnType.TIME_ARRAY]: normalize_array(normalize_time),
  [ScalarColumnType.TIMETZ]: normalize_timez,
  [ScalarColumnType.DATE]: normalize_date,
  [ArrayColumnType.DATE_ARRAY]: normalize_array(normalize_date),
  [ScalarColumnType.TIMESTAMP]: normalize_timestamp,
  [ArrayColumnType.TIMESTAMP_ARRAY]: normalize_array(normalize_timestamp),
  [ScalarColumnType.TIMESTAMPTZ]: normalize_timestampz,
  [ScalarColumnType.MONEY]: normalize_money,
  [ArrayColumnType.MONEY_ARRAY]: normalize_array(normalize_money),
  [ScalarColumnType.JSON]: toJson,
  [ScalarColumnType.JSONB]: toJson,
  [ScalarColumnType.BYTEA]: convertBytes,
  [ArrayColumnType.BYTEA_ARRAY]: normalizeByteaArray,
  [ArrayColumnType.BIT_ARRAY]: normalize_array(normalizeBit),
  [ArrayColumnType.VARBIT_ARRAY]: normalize_array(normalizeBit),
}

// https://github.com/brianc/node-postgres/pull/2930
export function fixArrayBufferValues(values: unknown[]) {
Expand Down
36 changes: 33 additions & 3 deletions src/pg-adapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ import { Debug, err, ok } from '@prisma/driver-adapter-utils'
// @ts-ignore: this is used to avoid the `Module '"<path>/node_modules/@types/pg/index"' has no default export.` error.
import pg from 'pg'

import { fieldToColumnType, fixArrayBufferValues, UnsupportedNativeDataType } from './conversion'
import { customParsers, fieldToColumnType, fixArrayBufferValues, UnsupportedNativeDataType } from './conversion'

const types = pg.types

const debug = Debug('prisma:driver-adapter:pg')

Expand All @@ -33,7 +35,6 @@ import {
} from './pg-utils'
// PATCH: end

// eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents
class PgQueryable<ClientT extends StdClient | TransactionClient> implements Queryable {
readonly provider = 'postgres'
readonly adapterName = '@prisma/adapter-pg'
Expand Down Expand Up @@ -179,7 +180,36 @@ class PgQueryable<ClientT extends StdClient | TransactionClient> implements Quer
logger.log(`${logger.tags['info'] ?? ''}`, text)
}

const result = await this.client.query({ text, values: fixArrayBufferValues(values), rowMode: 'array' })
const result = await this.client.query(
{
text,
values: fixArrayBufferValues(values),
rowMode: 'array',
types: {
// This is the error expected:
// No overload matches this call.
// The last overload gave the following error.
// Type '(oid: number, format?: any) => (json: string) => unknown' is not assignable to type '{ <T>(oid: number): TypeParser<string, string | T>; <T>(oid: number, format: "text"): TypeParser<string, string | T>; <T>(oid: number, format: "binary"): TypeParser<...>; }'.
// Type '(json: string) => unknown' is not assignable to type 'TypeParser<Buffer, any>'.
// Types of parameters 'json' and 'value' are incompatible.
// Type 'Buffer' is not assignable to type 'string'.ts(2769)
//
// Because pg-types types expect us to handle both binary and text protocol versions,
// where as far we can see, pg will ever pass only text version.
//
// @ts-expect-error
getTypeParser: (oid: number, format: binary) => {
if (format === 'text' && customParsers[oid]) {
return customParsers[oid]
}

return types.getTypeParser(oid, format)
},
},
},
fixArrayBufferValues(values),
)

return result
}
// PATCH: end
Expand Down

0 comments on commit 5a667b7

Please sign in to comment.