diff --git a/.changeset/config.json b/.changeset/config.json index 39874eb3c..b6606880a 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -8,6 +8,7 @@ "ponder-examples-feature-blocks", "ponder-examples-feature-factory", "ponder-examples-feature-filter", + "ponder-examples-feature-api-functions", "ponder-examples-feature-multichain", "ponder-examples-feature-call-traces", "ponder-examples-feature-proxy", @@ -22,7 +23,10 @@ "ponder-examples-with-foundry-ponder", "ponder-examples-with-foundry-foundry", "ponder-examples-with-nextjs", - "ponder-examples-with-nextjs-ponder" + "ponder-examples-with-nextjs-ponder", + "ponder-examples-with-trpc", + "ponder-examples-with-trpc-ponder", + "ponder-examples-with-trpc-client" ], "linked": [], "access": "public", diff --git a/.changeset/shy-donuts-battle.md b/.changeset/shy-donuts-battle.md new file mode 100644 index 000000000..4cbc9f011 --- /dev/null +++ b/.changeset/shy-donuts-battle.md @@ -0,0 +1,6 @@ +--- +"create-ponder": minor +"@ponder/core": minor +--- + +Introduced API functions. [Read more](https://ponder.sh/docs/query/api-functions). Please read the [migration guide](https://ponder.sh/docs/migration-guide) for more information. 
diff --git a/docs/pages/docs/_meta.ts b/docs/pages/docs/_meta.ts index 5a7a0975b..a59037dab 100644 --- a/docs/pages/docs/_meta.ts +++ b/docs/pages/docs/_meta.ts @@ -30,11 +30,11 @@ export default { }, indexing: { display: "children", title: "Indexing" }, - "-- Query the database": { + "-- Query": { type: "separator", - title: "Query the database", + title: "Query", }, - query: { display: "children", title: "Query the database" }, + query: { display: "children", title: "Query" }, "-- Production": { type: "separator", diff --git a/docs/pages/docs/indexing/create-update-records.mdx b/docs/pages/docs/indexing/create-update-records.mdx index 8585da6fb..6876c8132 100644 --- a/docs/pages/docs/indexing/create-update-records.mdx +++ b/docs/pages/docs/indexing/create-update-records.mdx @@ -1,11 +1,11 @@ --- -title: "Create and Update Records" +title: "Create and update records" description: "Learn how to create and update records in the Ponder database." --- # Create & update records -Ponder's store API is inspired by the [Prisma Client API](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#model-queries). The store supports the following methods. +The **Store API** is inspired by [Prisma](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#model-queries) and supports the following methods. - [`create`](#create) - [`update`](#update) diff --git a/docs/pages/docs/migration-guide.mdx b/docs/pages/docs/migration-guide.mdx index e5ad2e3ea..297d35a38 100644 --- a/docs/pages/docs/migration-guide.mdx +++ b/docs/pages/docs/migration-guide.mdx @@ -3,12 +3,38 @@ title: "Migration Guide" description: "A guide for migrating to new versions of Ponder." 
--- -import { FileTree, Steps } from "nextra/components"; -import { Callout } from "nextra/components"; +import { FileTree, Steps, Tabs, Callout } from "nextra/components"; import Architecture from "../../public/architecture.svg"; # Migration guide +## 0.5.0 + +**Breaking:** This release adds [Hono](https://hono.dev) as a peer dependency. After upgrading, install `hono` in your project. + +{/* prettier-ignore */} + + +```bash filename="shell" +pnpm add hono@latest +``` + + +```bash filename="shell" +yarn add hono@latest +``` + + +```bash filename="shell" +npm install hono@latest +``` + + + +### Introduced API functions + +This release added support for API functions. [Read more](/docs/query/api-functions). + ## 0.4.0 This release changes the location of database tables when using both SQLite and Postgres. @@ -17,11 +43,11 @@ It **does not** require any changes to your application code, and does not bust Please read the new docs on [direct SQL](/docs/query/direct-sql) for a detailed overview. -#### SQLite +### SQLite Ponder now uses the `.ponder/sqlite/public.db` file for indexed tables. Before, the tables were present as views in the `.ponder/sqlite/ponder.db`. Now, the`.ponder/sqlite/ponder.db` file is only used internally by Ponder. -#### Postgres +### Postgres Ponder now creates a table in the `public` schema for each table in `ponder.schema.ts`. Before, Ponder created them as views in the `ponder` schema. @@ -31,7 +57,7 @@ This also changes the zero-downtime behavior on platforms like Railway. For more - [Direct SQL](/docs/query/direct-sql) - [Zero-downtime deployments](/docs/production/zero-downtime) -#### Postgres table cleanup +### Postgres table cleanup After upgrading to `0.4.x`, you can run the following Postgres SQL script to clean up stale tables and views created by `0.3.x` Ponder apps. 
diff --git a/docs/pages/docs/query/_meta.ts b/docs/pages/docs/query/_meta.ts index 19f84ab05..49e84a20c 100644 --- a/docs/pages/docs/query/_meta.ts +++ b/docs/pages/docs/query/_meta.ts @@ -1,4 +1,5 @@ export default { + "api-functions": "API functions", "graphql": "GraphQL", "direct-sql": "Direct SQL", }; diff --git a/docs/pages/docs/query/api-functions.mdx b/docs/pages/docs/query/api-functions.mdx new file mode 100644 index 000000000..f18d13679 --- /dev/null +++ b/docs/pages/docs/query/api-functions.mdx @@ -0,0 +1,254 @@ +--- +title: "API functions" +description: "Use API functions to customize the API layer of your app." +--- + +import { Callout, Steps } from "nextra/components"; + +# API functions + +**API functions** are user-defined TypeScript functions that handle web requests. You can use them to customize the API layer of your app with complex SQL queries, authentication, data from external sources, and more. + +API functions are built on top of [Hono](https://hono.dev/), a fast and lightweight routing framework. + +## Example projects + +These example apps demonstrate how to use API functions. + +- [**Basic**](https://github.com/ponder-sh/ponder/tree/main/examples/feature-api-functions/src/api/index.ts) - An ERC20 app that responds to `GET` requests and uses the [Select API](#query-the-database) to build custom SQL queries. +- [**tRPC**](https://github.com/ponder-sh/ponder/tree/main/examples/with-trpc/client/index.ts) - An app that creates a [tRPC](https://trpc.io) server and a script that uses a tRPC client with end-to-end type safety. + +## Get started + + + +### Upgrade to `>=0.5.0` + +API functions are available starting from version `0.5.0`. Read the [migration guide](/docs/migration-guide#050) for more details. + +### Create `src/api/index.ts` file + +To enable API functions, create a file named `src/api/index.ts` with the following code. You can register API functions in any `.ts` file in the `src/api/` directory. 
+ +```ts filename="src/api/index.ts" +import { ponder } from "@/generated"; + +ponder.get("/hello", (c) => { + return c.text("Hello, world!"); +}); +``` + +### Send a request + +Visit `http://localhost:42069/hello` in your browser to see the response. + +```plaintext filename="Response" +Hello, world! +``` + +### Register GraphQL middleware + + + Once you create an API function file, you have "opted in" to API functions and + your app **will not** serve the standard GraphQL API by default. + + +To continue using the standard GraphQL API, register the `graphql` middleware exported from `@ponder/core`. + +```ts filename="src/api/index.ts" {2,4-5} +import { ponder } from "@/generated"; +import { graphql } from "@ponder/core"; + +ponder.use("/", graphql()); +ponder.use("/graphql", graphql()); + +// ... +``` + + + +## Query the database + +API functions can query the database using the read-only **Select API**, a type-safe query builder powered by [Drizzle](https://orm.drizzle.team/docs/overview). The Select API supports complex filters, joins, aggregations, set operations, and more. + + + The Select API is only available within API functions. Indexing functions use + the [Store API](/docs/indexing/create-update-records) (`findUnique`, `upsert`, + etc) which supports writes and is reorg-aware. + + +### Select + +The API function context contains a built-in database client (`db`) and an object for each table in your schema (`tables`). These objects are type-safe – changes to your `ponder.schema.ts` file will be reflected immediately. + +To build a query, use `c.db.select(){:ts}` and include a table object using `.from(c.tables.TableName){:ts}`. + +
+ +```ts filename="ponder.schema.ts" {4} +import { createSchema } from "@ponder/core"; + +export default createSchema((p) => ({ + Account: p.createTable({ + id: p.string(), + balance: p.bigint(), + }), +})); +``` + +```ts filename="src/api/index.ts" {6} +import { ponder } from "@/generated"; + +ponder.get("/account/:address", async (c) => { + const address = c.req.param("address"); + + const account = await c.db.select(c.tables.Account).limit(1); + + return c.json(account); +}); +``` + +
+ +To build more complex queries, use `join`, `groupBy`, `where`, `orderBy`, `limit`, and other methods. Drizzle's filter & conditional operators (like `eq`, `gte`, and `or`) are re-exported by `@ponder/core`. + +For more details, please reference the [Drizzle documentation](https://orm.drizzle.team/docs/select). + +```ts filename="src/api/index.ts" {2,7-11} +import { ponder } from "@/generated"; +import { gte } from "@ponder/core"; + +ponder.get("/whales", async (c) => { + const { Account } = c.tables; + + const whales = await c.db + .select({ address: Account.id, balance: Account.balance }) + .from(Account.id) + .where(gte(TransferEvent.balance, 1_000_000_000n)) + .limit(1); + + return c.json(whales); +}); +``` + +### Execute + +To run raw SQL queries, use `db.execute(...){:ts}` with the `sql` utility function. [Read more](https://orm.drizzle.team/docs/sql) about the `sql` function. + +```ts filename="src/api/index.ts" {2,7-9} +import { ponder } from "@/generated"; +import { sql } from "@ponder/core"; + +ponder.get("/:token/ticker", async (c) => { + const token = c.req.param("token"); + + const result = await c.db.execute( + sql`SELECT ticker FROM "Token" WHERE id = ${token}` + ); + const ticker = result.rows[0]?.ticker; + + return c.text(ticker); +}); +``` + +## API reference + +### `get()` + +Use `ponder.get()` to handle HTTP `GET` requests. The `c` context object contains the request, response helpers, and the database connection. + +```ts filename="src/api/index.ts" {3,5} +import { ponder } from "@/generated"; + +ponder.get("/account/:address", async (c) => { + const { Account } = c.tables; + const address = c.req.param("address"); + + const account = await c.db + .select() + .from(Account) + .where({ id: address }) + .first(); + + if (account) { + return c.json(account); + } else { + return c.status(404).json({ error: "Account not found" }); + } +}); +``` + +### `post()` + + + API functions cannot write to the database, even when handling `POST` + requests. 
+ + +Use `ponder.post()` to handle HTTP `POST` requests. + +In this example, we calculate the volume of transfers for each recipient within a given time range. The `fromTimestamp` and `toTimestamp` parameters are passed in the request body. + +```ts filename="src/api/index.ts" {5,7-9} +import { ponder } from "@/generated"; +import { and, gte, sum } from "@ponder/core"; + +ponder.post("/volume", async (c) => { + const { TransferEvent } = c.tables; + + const body = await c.req.json(); + const { fromTimestamp, toTimestamp } = body; + + const volumeChartData = await c.db + .select({ + to: TransferEvent.toId, + volume: sum(TransferEvent.amount), + }) + .from(TransferEvent) + .groupBy(TransferEvent.toId) + .where( + and( + gte(TransferEvent.timestamp, fromTimestamp), + lte(TransferEvent.timestamp, toTimestamp) + ) + ) + .limit(1); + + return c.json(volumeChartData); +}); +``` + +### `use()` + +Use `ponder.use(...){:ts}` to add middleware to your API functions. Middleware functions can modify the request and response objects, add logs, authenticate requests, and more. [Read more](https://hono.dev/docs/guides/middleware) about Hono middleware. + +```ts filename="src/api/index.ts" {3} +import { ponder } from "@/generated"; + +ponder.use((c) => { + console.log("Request received:", c.req.url); + return c.next(); +}); +``` + +### `hono` + +Use `ponder.hono` to access the underlying Hono instance. + +```ts filename="src/api/index.ts" {3} +import { ponder } from "@/generated"; + +ponder.hono.notFound((c) => { + return c.text("Custom 404 Message", 404); +}); + +// ... +``` + +## Reserved routes + +If you register API functions that conflict with these internal routes, the build will fail. + +- `/health`: Returns a `200` status code after the app has completed historical indexing OR the healthcheck timeout has expired, whichever comes first. [Read more](/docs/production/zero-downtime) about healthchecks. +- `/metrics`: Returns Prometheus metrics. 
[Read more](/docs/advanced/metrics) about metrics. +- `/status`: Returns indexing status object. [Read more](/docs/advanced/status) about indexing status. diff --git a/examples/feature-api-functions/.env.example b/examples/feature-api-functions/.env.example new file mode 100644 index 000000000..f7745c21c --- /dev/null +++ b/examples/feature-api-functions/.env.example @@ -0,0 +1,5 @@ +# Mainnet RPC URL used for fetching blockchain data. Alchemy is recommended. +PONDER_RPC_URL_1=https://eth-mainnet.g.alchemy.com/v2/... + +# (Optional) Postgres database URL. If not provided, SQLite will be used. +DATABASE_URL= \ No newline at end of file diff --git a/examples/feature-api-functions/.eslintrc.json b/examples/feature-api-functions/.eslintrc.json new file mode 100644 index 000000000..359e2bbfa --- /dev/null +++ b/examples/feature-api-functions/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "ponder" +} diff --git a/examples/feature-api-functions/.gitignore b/examples/feature-api-functions/.gitignore new file mode 100644 index 000000000..f0c7e1177 --- /dev/null +++ b/examples/feature-api-functions/.gitignore @@ -0,0 +1,18 @@ +# Dependencies +/node_modules + +# Debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Misc +.DS_Store + +# Env files +.env*.local + +# Ponder +/generated/ +/.ponder/ diff --git a/examples/feature-api-functions/abis/erc20ABI.ts b/examples/feature-api-functions/abis/erc20ABI.ts new file mode 100644 index 000000000..94cbc6a33 --- /dev/null +++ b/examples/feature-api-functions/abis/erc20ABI.ts @@ -0,0 +1,147 @@ +export const erc20ABI = [ + { + stateMutability: "view", + type: "function", + inputs: [], + name: "DOMAIN_SEPARATOR", + outputs: [{ name: "", internalType: "bytes32", type: "bytes32" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [ + { name: "", internalType: "address", type: "address" }, + { name: "", internalType: "address", type: "address" }, + ], + name: "allowance", + outputs: [{ name: "", 
internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "spender", internalType: "address", type: "address" }, + { name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "approve", + outputs: [{ name: "", internalType: "bool", type: "bool" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [{ name: "", internalType: "address", type: "address" }], + name: "balanceOf", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "decimals", + outputs: [{ name: "", internalType: "uint8", type: "uint8" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "name", + outputs: [{ name: "", internalType: "string", type: "string" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [{ name: "", internalType: "address", type: "address" }], + name: "nonces", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "owner", internalType: "address", type: "address" }, + { name: "spender", internalType: "address", type: "address" }, + { name: "value", internalType: "uint256", type: "uint256" }, + { name: "deadline", internalType: "uint256", type: "uint256" }, + { name: "v", internalType: "uint8", type: "uint8" }, + { name: "r", internalType: "bytes32", type: "bytes32" }, + { name: "s", internalType: "bytes32", type: "bytes32" }, + ], + name: "permit", + outputs: [], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "symbol", + outputs: [{ name: "", internalType: "string", type: "string" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "totalSupply", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: 
[ + { name: "to", internalType: "address", type: "address" }, + { name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "transfer", + outputs: [{ name: "", internalType: "bool", type: "bool" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "from", internalType: "address", type: "address" }, + { name: "to", internalType: "address", type: "address" }, + { name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "transferFrom", + outputs: [{ name: "", internalType: "bool", type: "bool" }], + }, + { + type: "event", + anonymous: false, + inputs: [ + { + name: "owner", + internalType: "address", + type: "address", + indexed: true, + }, + { + name: "spender", + internalType: "address", + type: "address", + indexed: true, + }, + { + name: "amount", + internalType: "uint256", + type: "uint256", + indexed: false, + }, + ], + name: "Approval", + }, + { + type: "event", + anonymous: false, + inputs: [ + { name: "from", internalType: "address", type: "address", indexed: true }, + { name: "to", internalType: "address", type: "address", indexed: true }, + { + name: "amount", + internalType: "uint256", + type: "uint256", + indexed: false, + }, + ], + name: "Transfer", + }, +] as const; diff --git a/examples/feature-api-functions/package.json b/examples/feature-api-functions/package.json new file mode 100644 index 000000000..9578a0a34 --- /dev/null +++ b/examples/feature-api-functions/package.json @@ -0,0 +1,27 @@ +{ + "name": "ponder-examples-feature-api-functions", + "private": true, + "type": "module", + "scripts": { + "dev": "ponder dev", + "start": "ponder start", + "codegen": "ponder codegen", + "serve": "ponder serve", + "lint": "eslint .", + "typecheck": "tsc" + }, + "dependencies": { + "@ponder/core": "workspace:*", + "hono": "^4.5.0", + "viem": "^1.19.9" + }, + "devDependencies": { + "@types/node": "^20.10.0", + "eslint": "^8.54.0", + "eslint-config-ponder": "workspace:*", + "typescript": "^5.3.2" 
+ }, + "engines": { + "node": ">=18.14" + } +} diff --git a/examples/feature-api-functions/ponder-env.d.ts b/examples/feature-api-functions/ponder-env.d.ts new file mode 100644 index 000000000..03126bf92 --- /dev/null +++ b/examples/feature-api-functions/ponder-env.d.ts @@ -0,0 +1,28 @@ +// This file enables type checking and editor autocomplete for this Ponder project. +// After upgrading, you may find that changes have been made to this file. +// If this happens, please commit the changes. Do not manually edit this file. +// See https://ponder.sh/docs/getting-started/installation#typescript for more information. + +declare module "@/generated" { + import type { Virtual } from "@ponder/core"; + + type config = typeof import("./ponder.config.ts").default; + type schema = typeof import("./ponder.schema.ts").default; + + export const ponder: Virtual.Registry; + + export type EventNames = Virtual.EventNames; + export type Event = Virtual.Event< + config, + name + >; + export type Context = Virtual.Context< + config, + schema, + name + >; + export type ApiContext = Virtual.Drizzle; + export type IndexingFunctionArgs = + Virtual.IndexingFunctionArgs; + export type Schema = Virtual.Schema; +} diff --git a/examples/feature-api-functions/ponder.config.ts b/examples/feature-api-functions/ponder.config.ts new file mode 100644 index 000000000..32ae5a2ba --- /dev/null +++ b/examples/feature-api-functions/ponder.config.ts @@ -0,0 +1,21 @@ +import { createConfig } from "@ponder/core"; +import { http } from "viem"; +import { erc20ABI } from "./abis/erc20ABI"; + +export default createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(process.env.PONDER_RPC_URL_1), + }, + }, + contracts: { + ERC20: { + network: "mainnet", + abi: erc20ABI, + address: "0x32353A6C91143bfd6C7d363B546e62a9A2489A20", + startBlock: 13142655, + endBlock: 13150000, + }, + }, +}); diff --git a/examples/feature-api-functions/ponder.schema.ts b/examples/feature-api-functions/ponder.schema.ts 
new file mode 100644 index 000000000..a722500c6 --- /dev/null +++ b/examples/feature-api-functions/ponder.schema.ts @@ -0,0 +1,50 @@ +import { createSchema } from "@ponder/core"; + +export default createSchema((p) => ({ + Account: p.createTable({ + id: p.hex(), + balance: p.bigint(), + isOwner: p.boolean(), + + allowances: p.many("Allowance.ownerId"), + approvalOwnerEvents: p.many("ApprovalEvent.ownerId"), + approvalSpenderEvents: p.many("ApprovalEvent.spenderId"), + transferFromEvents: p.many("TransferEvent.fromId"), + transferToEvents: p.many("TransferEvent.toId"), + }), + Allowance: p.createTable({ + id: p.string(), + amount: p.bigint(), + + ownerId: p.hex().references("Account.id"), + spenderId: p.hex().references("Account.id"), + + owner: p.one("ownerId"), + spender: p.one("spenderId"), + }), + TransferEvent: p.createTable( + { + id: p.string(), + amount: p.bigint(), + timestamp: p.int(), + + fromId: p.hex().references("Account.id"), + toId: p.hex().references("Account.id"), + + from: p.one("fromId"), + to: p.one("toId"), + }, + { fromIdIndex: p.index("fromId") }, + ), + ApprovalEvent: p.createTable({ + id: p.string(), + amount: p.bigint(), + timestamp: p.int(), + + ownerId: p.hex().references("Account.id"), + spenderId: p.hex().references("Account.id"), + + owner: p.one("ownerId"), + spender: p.one("spenderId"), + }), +})); diff --git a/examples/feature-api-functions/src/api/index.ts b/examples/feature-api-functions/src/api/index.ts new file mode 100644 index 000000000..219dbde3b --- /dev/null +++ b/examples/feature-api-functions/src/api/index.ts @@ -0,0 +1,47 @@ +import { ponder } from "@/generated"; +import { count, desc, eq, graphql, or, replaceBigInts } from "@ponder/core"; +import { formatEther, getAddress } from "viem"; + +ponder.use("/graphql", graphql()); + +ponder.get("/count", async (c) => { + const result = await c.db + .select({ count: count() }) + .from(c.tables.TransferEvent); + + if (result.length === 0) return c.text("0"); + return 
c.text(String(result[0]!.count)); +}); + +ponder.get("/count/:address", async (c) => { + const account = getAddress(c.req.param("address")); + const { TransferEvent } = c.tables; + + const result = await c.db + .select({ count: count() }) + .from(c.tables.TransferEvent) + .where( + or(eq(TransferEvent.fromId, account), eq(TransferEvent.toId, account)), + ); + + if (result.length === 0) return c.text("0"); + return c.text(String(result[0]!.count)); +}); + +ponder.get("/whale-transfers", async (c) => { + const { TransferEvent, Account } = c.tables; + + // Top 10 transfers from whale accounts + const result = await c.db + .select({ + amount: TransferEvent.amount, + senderBalance: Account.balance, + }) + .from(TransferEvent) + .innerJoin(Account, eq(TransferEvent.fromId, Account.id)) + .orderBy(desc(Account.balance)) + .limit(10); + + if (result.length === 0) return c.text("Not found", 500); + return c.json(replaceBigInts(result, (b) => formatEther(b))); +}); diff --git a/examples/feature-api-functions/src/index.ts b/examples/feature-api-functions/src/index.ts new file mode 100644 index 000000000..16bf33aa7 --- /dev/null +++ b/examples/feature-api-functions/src/index.ts @@ -0,0 +1,70 @@ +import { ponder } from "@/generated"; + +ponder.on("ERC20:Transfer", async ({ event, context }) => { + const { Account, TransferEvent } = context.db; + + // Create an Account for the sender, or update the balance if it already exists. + await Account.upsert({ + id: event.args.from, + create: { + balance: BigInt(0), + isOwner: false, + }, + update: ({ current }) => ({ + balance: current.balance - event.args.amount, + }), + }); + + // Create an Account for the recipient, or update the balance if it already exists. + await Account.upsert({ + id: event.args.to, + create: { + balance: event.args.amount, + isOwner: false, + }, + update: ({ current }) => ({ + balance: current.balance + event.args.amount, + }), + }); + + // Create a TransferEvent. 
+ await TransferEvent.create({ + id: event.log.id, + data: { + fromId: event.args.from, + toId: event.args.to, + amount: event.args.amount, + timestamp: Number(event.block.timestamp), + }, + }); +}); + +ponder.on("ERC20:Approval", async ({ event, context }) => { + const { Allowance, ApprovalEvent } = context.db; + + const allowanceId = `${event.args.owner}-${event.args.spender}`; + + // Create or update the Allowance. + await Allowance.upsert({ + id: allowanceId, + create: { + ownerId: event.args.owner, + spenderId: event.args.spender, + amount: event.args.amount, + }, + update: { + amount: event.args.amount, + }, + }); + + // Create an ApprovalEvent. + await ApprovalEvent.create({ + id: event.log.id, + data: { + ownerId: event.args.owner, + spenderId: event.args.spender, + amount: event.args.amount, + timestamp: Number(event.block.timestamp), + }, + }); +}); diff --git a/examples/feature-api-functions/tsconfig.json b/examples/feature-api-functions/tsconfig.json new file mode 100644 index 000000000..592b9a939 --- /dev/null +++ b/examples/feature-api-functions/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + // Type checking + "strict": true, + "noUncheckedIndexedAccess": true, + + // Interop constraints + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "isolatedModules": true, + "allowSyntheticDefaultImports": true, + "resolveJsonModule": true, + + // Language and environment + "moduleResolution": "bundler", + "module": "ESNext", + "noEmit": true, + "lib": ["ES2022"], + "target": "ES2022", + + // Skip type checking for node modules + "skipLibCheck": true + }, + "include": ["./**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/examples/feature-blocks/package.json b/examples/feature-blocks/package.json index 30ac311bb..0ee3320c7 100644 --- a/examples/feature-blocks/package.json +++ b/examples/feature-blocks/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, 
"devDependencies": { diff --git a/examples/feature-blocks/ponder-env.d.ts b/examples/feature-blocks/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-blocks/ponder-env.d.ts +++ b/examples/feature-blocks/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-call-traces/package.json b/examples/feature-call-traces/package.json index 8cb804b97..47495ec9a 100644 --- a/examples/feature-call-traces/package.json +++ b/examples/feature-call-traces/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/feature-call-traces/ponder-env.d.ts b/examples/feature-call-traces/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-call-traces/ponder-env.d.ts +++ b/examples/feature-call-traces/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-factory/package.json b/examples/feature-factory/package.json index 910df7367..40a0a0f2d 100644 --- a/examples/feature-factory/package.json +++ b/examples/feature-factory/package.json @@ -13,6 +13,7 @@ "dependencies": { "@ponder/core": "workspace:*", "abitype": "^0.10.2", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/feature-factory/ponder-env.d.ts b/examples/feature-factory/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-factory/ponder-env.d.ts +++ b/examples/feature-factory/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = 
Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-filter/package.json b/examples/feature-filter/package.json index 2f06346e6..7d67fddaf 100644 --- a/examples/feature-filter/package.json +++ b/examples/feature-filter/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/feature-filter/ponder-env.d.ts b/examples/feature-filter/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-filter/ponder-env.d.ts +++ b/examples/feature-filter/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-multichain/package.json b/examples/feature-multichain/package.json index 0ca701d42..242ffd29d 100644 --- a/examples/feature-multichain/package.json +++ b/examples/feature-multichain/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/feature-multichain/ponder-env.d.ts b/examples/feature-multichain/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-multichain/ponder-env.d.ts +++ b/examples/feature-multichain/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-proxy/package.json b/examples/feature-proxy/package.json index 01f8f305f..679e8bf32 100644 --- a/examples/feature-proxy/package.json +++ b/examples/feature-proxy/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git 
a/examples/feature-proxy/ponder-env.d.ts b/examples/feature-proxy/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-proxy/ponder-env.d.ts +++ b/examples/feature-proxy/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/feature-read-contract/package.json b/examples/feature-read-contract/package.json index 6a833a937..6107ee15f 100644 --- a/examples/feature-read-contract/package.json +++ b/examples/feature-read-contract/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/feature-read-contract/ponder-env.d.ts b/examples/feature-read-contract/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/feature-read-contract/ponder-env.d.ts +++ b/examples/feature-read-contract/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/project-friendtech/package.json b/examples/project-friendtech/package.json index 90cb0c488..de1ac3979 100644 --- a/examples/project-friendtech/package.json +++ b/examples/project-friendtech/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/project-friendtech/ponder-env.d.ts b/examples/project-friendtech/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/project-friendtech/ponder-env.d.ts +++ b/examples/project-friendtech/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = 
Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/project-uniswap-v3-flash/package.json b/examples/project-uniswap-v3-flash/package.json index 01e060539..c6c8696b7 100644 --- a/examples/project-uniswap-v3-flash/package.json +++ b/examples/project-uniswap-v3-flash/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/project-uniswap-v3-flash/ponder-env.d.ts b/examples/project-uniswap-v3-flash/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/project-uniswap-v3-flash/ponder-env.d.ts +++ b/examples/project-uniswap-v3-flash/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/reference-erc1155/package.json b/examples/reference-erc1155/package.json index fbb297929..e72e4e9a8 100644 --- a/examples/reference-erc1155/package.json +++ b/examples/reference-erc1155/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/reference-erc1155/ponder-env.d.ts b/examples/reference-erc1155/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/reference-erc1155/ponder-env.d.ts +++ b/examples/reference-erc1155/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/reference-erc20/package.json b/examples/reference-erc20/package.json index 2398e90c5..dcf42502f 100644 --- a/examples/reference-erc20/package.json +++ b/examples/reference-erc20/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", 
+ "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/reference-erc20/ponder-env.d.ts b/examples/reference-erc20/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/reference-erc20/ponder-env.d.ts +++ b/examples/reference-erc20/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/reference-erc4626/package.json b/examples/reference-erc4626/package.json index b504c9b72..9c7a2ffeb 100644 --- a/examples/reference-erc4626/package.json +++ b/examples/reference-erc4626/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/reference-erc4626/ponder-env.d.ts b/examples/reference-erc4626/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/reference-erc4626/ponder-env.d.ts +++ b/examples/reference-erc4626/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/reference-erc721/package.json b/examples/reference-erc721/package.json index 879b2ab64..edd6b2abe 100644 --- a/examples/reference-erc721/package.json +++ b/examples/reference-erc721/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/reference-erc721/ponder-env.d.ts b/examples/reference-erc721/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/reference-erc721/ponder-env.d.ts +++ b/examples/reference-erc721/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type 
IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/with-foundry/ponder/package.json b/examples/with-foundry/ponder/package.json index 5811c4c65..1f57f0646 100644 --- a/examples/with-foundry/ponder/package.json +++ b/examples/with-foundry/ponder/package.json @@ -11,6 +11,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.3" }, "devDependencies": { diff --git a/examples/with-foundry/ponder/ponder-env.d.ts b/examples/with-foundry/ponder/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/with-foundry/ponder/ponder-env.d.ts +++ b/examples/with-foundry/ponder/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/with-nextjs/ponder/package.json b/examples/with-nextjs/ponder/package.json index 83a129df0..fd47071e8 100644 --- a/examples/with-nextjs/ponder/package.json +++ b/examples/with-nextjs/ponder/package.json @@ -11,6 +11,7 @@ }, "dependencies": { "@ponder/core": "workspace:*", + "hono": "^4.5.0", "viem": "^1.19.9" }, "devDependencies": { diff --git a/examples/with-nextjs/ponder/ponder-env.d.ts b/examples/with-nextjs/ponder/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/examples/with-nextjs/ponder/ponder-env.d.ts +++ b/examples/with-nextjs/ponder/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/examples/with-trpc/client/env.d.ts b/examples/with-trpc/client/env.d.ts new file mode 100644 index 000000000..74e33e49d --- /dev/null +++ b/examples/with-trpc/client/env.d.ts @@ -0,0 +1 @@ +/// diff --git a/examples/with-trpc/client/index.ts b/examples/with-trpc/client/index.ts 
new file mode 100644 index 000000000..df33d8e8e --- /dev/null +++ b/examples/with-trpc/client/index.ts @@ -0,0 +1,17 @@ +import { createTRPCProxyClient, httpBatchLink } from "@trpc/client"; +import type { AppRouter } from "../ponder/src/api/index"; + +const client = createTRPCProxyClient({ + links: [ + httpBatchLink({ + url: "http://localhost:42069/trpc", + }), + ], +}); + +const response = await client.hello.query( + // ^? + "0xC1894e6a52c4C7Ac5b2e0b25583Ea48bf45DA14a", +); + +console.log(response); diff --git a/examples/with-trpc/client/package.json b/examples/with-trpc/client/package.json new file mode 100644 index 000000000..02f06cc8f --- /dev/null +++ b/examples/with-trpc/client/package.json @@ -0,0 +1,12 @@ +{ + "name": "ponder-examples-with-trpc-client", + "private": true, + "type": "module", + "scripts": {}, + "dependencies": { + "@trpc/client": "^10.45.2" + }, + "engines": { + "node": ">=18.14" + } +} diff --git a/examples/with-trpc/client/tsconfig.json b/examples/with-trpc/client/tsconfig.json new file mode 100644 index 000000000..592b9a939 --- /dev/null +++ b/examples/with-trpc/client/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + // Type checking + "strict": true, + "noUncheckedIndexedAccess": true, + + // Interop constraints + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "isolatedModules": true, + "allowSyntheticDefaultImports": true, + "resolveJsonModule": true, + + // Language and environment + "moduleResolution": "bundler", + "module": "ESNext", + "noEmit": true, + "lib": ["ES2022"], + "target": "ES2022", + + // Skip type checking for node modules + "skipLibCheck": true + }, + "include": ["./**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/examples/with-trpc/package.json b/examples/with-trpc/package.json new file mode 100644 index 000000000..4873fe900 --- /dev/null +++ b/examples/with-trpc/package.json @@ -0,0 +1,8 @@ +{ + "name": "ponder-examples-with-trpc", + "private": true, + "type": "module", + "engines": { + 
"node": ">=18.14" + } +} diff --git a/examples/with-trpc/ponder/.env.example b/examples/with-trpc/ponder/.env.example new file mode 100644 index 000000000..f7745c21c --- /dev/null +++ b/examples/with-trpc/ponder/.env.example @@ -0,0 +1,5 @@ +# Mainnet RPC URL used for fetching blockchain data. Alchemy is recommended. +PONDER_RPC_URL_1=https://eth-mainnet.g.alchemy.com/v2/... + +# (Optional) Postgres database URL. If not provided, SQLite will be used. +DATABASE_URL= \ No newline at end of file diff --git a/examples/with-trpc/ponder/.eslintrc.json b/examples/with-trpc/ponder/.eslintrc.json new file mode 100644 index 000000000..359e2bbfa --- /dev/null +++ b/examples/with-trpc/ponder/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "ponder" +} diff --git a/examples/with-trpc/ponder/.gitignore b/examples/with-trpc/ponder/.gitignore new file mode 100644 index 000000000..f0c7e1177 --- /dev/null +++ b/examples/with-trpc/ponder/.gitignore @@ -0,0 +1,18 @@ +# Dependencies +/node_modules + +# Debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Misc +.DS_Store + +# Env files +.env*.local + +# Ponder +/generated/ +/.ponder/ diff --git a/examples/with-trpc/ponder/abis/erc20ABI.ts b/examples/with-trpc/ponder/abis/erc20ABI.ts new file mode 100644 index 000000000..94cbc6a33 --- /dev/null +++ b/examples/with-trpc/ponder/abis/erc20ABI.ts @@ -0,0 +1,147 @@ +export const erc20ABI = [ + { + stateMutability: "view", + type: "function", + inputs: [], + name: "DOMAIN_SEPARATOR", + outputs: [{ name: "", internalType: "bytes32", type: "bytes32" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [ + { name: "", internalType: "address", type: "address" }, + { name: "", internalType: "address", type: "address" }, + ], + name: "allowance", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "spender", internalType: "address", type: "address" }, + { 
name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "approve", + outputs: [{ name: "", internalType: "bool", type: "bool" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [{ name: "", internalType: "address", type: "address" }], + name: "balanceOf", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "decimals", + outputs: [{ name: "", internalType: "uint8", type: "uint8" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "name", + outputs: [{ name: "", internalType: "string", type: "string" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [{ name: "", internalType: "address", type: "address" }], + name: "nonces", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "owner", internalType: "address", type: "address" }, + { name: "spender", internalType: "address", type: "address" }, + { name: "value", internalType: "uint256", type: "uint256" }, + { name: "deadline", internalType: "uint256", type: "uint256" }, + { name: "v", internalType: "uint8", type: "uint8" }, + { name: "r", internalType: "bytes32", type: "bytes32" }, + { name: "s", internalType: "bytes32", type: "bytes32" }, + ], + name: "permit", + outputs: [], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "symbol", + outputs: [{ name: "", internalType: "string", type: "string" }], + }, + { + stateMutability: "view", + type: "function", + inputs: [], + name: "totalSupply", + outputs: [{ name: "", internalType: "uint256", type: "uint256" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "to", internalType: "address", type: "address" }, + { name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "transfer", + outputs: [{ name: "", internalType: 
"bool", type: "bool" }], + }, + { + stateMutability: "nonpayable", + type: "function", + inputs: [ + { name: "from", internalType: "address", type: "address" }, + { name: "to", internalType: "address", type: "address" }, + { name: "amount", internalType: "uint256", type: "uint256" }, + ], + name: "transferFrom", + outputs: [{ name: "", internalType: "bool", type: "bool" }], + }, + { + type: "event", + anonymous: false, + inputs: [ + { + name: "owner", + internalType: "address", + type: "address", + indexed: true, + }, + { + name: "spender", + internalType: "address", + type: "address", + indexed: true, + }, + { + name: "amount", + internalType: "uint256", + type: "uint256", + indexed: false, + }, + ], + name: "Approval", + }, + { + type: "event", + anonymous: false, + inputs: [ + { name: "from", internalType: "address", type: "address", indexed: true }, + { name: "to", internalType: "address", type: "address", indexed: true }, + { + name: "amount", + internalType: "uint256", + type: "uint256", + indexed: false, + }, + ], + name: "Transfer", + }, +] as const; diff --git a/examples/with-trpc/ponder/package.json b/examples/with-trpc/ponder/package.json new file mode 100644 index 000000000..e0f3aa491 --- /dev/null +++ b/examples/with-trpc/ponder/package.json @@ -0,0 +1,29 @@ +{ + "private": true, + "name": "ponder-examples-with-trpc-ponder", + "type": "module", + "scripts": { + "dev": "ponder dev", + "start": "ponder start", + "codegen": "ponder codegen", + "lint": "eslint .", + "typecheck": "tsc" + }, + "dependencies": { + "@hono/trpc-server": "^0.3.2", + "@ponder/core": "workspace:*", + "@trpc/server": "^10.45.2", + "hono": "^4.5.0", + "viem": "^1.19.9", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/node": "^20.10.0", + "eslint": "^8.54.0", + "eslint-config-ponder": "workspace:*", + "typescript": "^5.3.2" + }, + "engines": { + "node": ">=18.14" + } +} diff --git a/examples/with-trpc/ponder/ponder-env.d.ts b/examples/with-trpc/ponder/ponder-env.d.ts new 
file mode 100644 index 000000000..03126bf92 --- /dev/null +++ b/examples/with-trpc/ponder/ponder-env.d.ts @@ -0,0 +1,28 @@ +// This file enables type checking and editor autocomplete for this Ponder project. +// After upgrading, you may find that changes have been made to this file. +// If this happens, please commit the changes. Do not manually edit this file. +// See https://ponder.sh/docs/getting-started/installation#typescript for more information. + +declare module "@/generated" { + import type { Virtual } from "@ponder/core"; + + type config = typeof import("./ponder.config.ts").default; + type schema = typeof import("./ponder.schema.ts").default; + + export const ponder: Virtual.Registry; + + export type EventNames = Virtual.EventNames; + export type Event = Virtual.Event< + config, + name + >; + export type Context = Virtual.Context< + config, + schema, + name + >; + export type ApiContext = Virtual.Drizzle; + export type IndexingFunctionArgs = + Virtual.IndexingFunctionArgs; + export type Schema = Virtual.Schema; +} diff --git a/examples/with-trpc/ponder/ponder.config.ts b/examples/with-trpc/ponder/ponder.config.ts new file mode 100644 index 000000000..32ae5a2ba --- /dev/null +++ b/examples/with-trpc/ponder/ponder.config.ts @@ -0,0 +1,21 @@ +import { createConfig } from "@ponder/core"; +import { http } from "viem"; +import { erc20ABI } from "./abis/erc20ABI"; + +export default createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(process.env.PONDER_RPC_URL_1), + }, + }, + contracts: { + ERC20: { + network: "mainnet", + abi: erc20ABI, + address: "0x32353A6C91143bfd6C7d363B546e62a9A2489A20", + startBlock: 13142655, + endBlock: 13150000, + }, + }, +}); diff --git a/examples/with-trpc/ponder/ponder.schema.ts b/examples/with-trpc/ponder/ponder.schema.ts new file mode 100644 index 000000000..a722500c6 --- /dev/null +++ b/examples/with-trpc/ponder/ponder.schema.ts @@ -0,0 +1,50 @@ +import { createSchema } from "@ponder/core"; + +export default 
createSchema((p) => ({ + Account: p.createTable({ + id: p.hex(), + balance: p.bigint(), + isOwner: p.boolean(), + + allowances: p.many("Allowance.ownerId"), + approvalOwnerEvents: p.many("ApprovalEvent.ownerId"), + approvalSpenderEvents: p.many("ApprovalEvent.spenderId"), + transferFromEvents: p.many("TransferEvent.fromId"), + transferToEvents: p.many("TransferEvent.toId"), + }), + Allowance: p.createTable({ + id: p.string(), + amount: p.bigint(), + + ownerId: p.hex().references("Account.id"), + spenderId: p.hex().references("Account.id"), + + owner: p.one("ownerId"), + spender: p.one("spenderId"), + }), + TransferEvent: p.createTable( + { + id: p.string(), + amount: p.bigint(), + timestamp: p.int(), + + fromId: p.hex().references("Account.id"), + toId: p.hex().references("Account.id"), + + from: p.one("fromId"), + to: p.one("toId"), + }, + { fromIdIndex: p.index("fromId") }, + ), + ApprovalEvent: p.createTable({ + id: p.string(), + amount: p.bigint(), + timestamp: p.int(), + + ownerId: p.hex().references("Account.id"), + spenderId: p.hex().references("Account.id"), + + owner: p.one("ownerId"), + spender: p.one("spenderId"), + }), +})); diff --git a/examples/with-trpc/ponder/src/api/index.ts b/examples/with-trpc/ponder/src/api/index.ts new file mode 100644 index 000000000..709808d87 --- /dev/null +++ b/examples/with-trpc/ponder/src/api/index.ts @@ -0,0 +1,33 @@ +import { type ApiContext, ponder } from "@/generated"; +import { trpcServer } from "@hono/trpc-server"; +import { eq } from "@ponder/core"; +import { initTRPC } from "@trpc/server"; +import type { Address } from "viem"; +import { z } from "zod"; + +const t = initTRPC.context().create(); + +const appRouter = t.router({ + hello: t.procedure.input(z.string()).query(async ({ input, ctx }) => { + const { Account } = ctx.tables; + + const account = await ctx.db + .select({ balance: Account.balance }) + .from(Account) + .where(eq(Account.id, input as Address)) + .limit(1); + + if (account.length === 0) return 
null; + return account[0]!.balance.toString(); + }), +}); + +export type AppRouter = typeof appRouter; + +ponder.use( + "/trpc/*", + trpcServer({ + router: appRouter, + createContext: (_, c) => c.var, + }), +); diff --git a/examples/with-trpc/ponder/src/index.ts b/examples/with-trpc/ponder/src/index.ts new file mode 100644 index 000000000..16bf33aa7 --- /dev/null +++ b/examples/with-trpc/ponder/src/index.ts @@ -0,0 +1,70 @@ +import { ponder } from "@/generated"; + +ponder.on("ERC20:Transfer", async ({ event, context }) => { + const { Account, TransferEvent } = context.db; + + // Create an Account for the sender, or update the balance if it already exists. + await Account.upsert({ + id: event.args.from, + create: { + balance: BigInt(0), + isOwner: false, + }, + update: ({ current }) => ({ + balance: current.balance - event.args.amount, + }), + }); + + // Create an Account for the recipient, or update the balance if it already exists. + await Account.upsert({ + id: event.args.to, + create: { + balance: event.args.amount, + isOwner: false, + }, + update: ({ current }) => ({ + balance: current.balance + event.args.amount, + }), + }); + + // Create a TransferEvent. + await TransferEvent.create({ + id: event.log.id, + data: { + fromId: event.args.from, + toId: event.args.to, + amount: event.args.amount, + timestamp: Number(event.block.timestamp), + }, + }); +}); + +ponder.on("ERC20:Approval", async ({ event, context }) => { + const { Allowance, ApprovalEvent } = context.db; + + const allowanceId = `${event.args.owner}-${event.args.spender}`; + + // Create or update the Allowance. + await Allowance.upsert({ + id: allowanceId, + create: { + ownerId: event.args.owner, + spenderId: event.args.spender, + amount: event.args.amount, + }, + update: { + amount: event.args.amount, + }, + }); + + // Create an ApprovalEvent. 
+ await ApprovalEvent.create({ + id: event.log.id, + data: { + ownerId: event.args.owner, + spenderId: event.args.spender, + amount: event.args.amount, + timestamp: Number(event.block.timestamp), + }, + }); +}); diff --git a/examples/with-trpc/ponder/tsconfig.json b/examples/with-trpc/ponder/tsconfig.json new file mode 100644 index 000000000..592b9a939 --- /dev/null +++ b/examples/with-trpc/ponder/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + // Type checking + "strict": true, + "noUncheckedIndexedAccess": true, + + // Interop constraints + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "isolatedModules": true, + "allowSyntheticDefaultImports": true, + "resolveJsonModule": true, + + // Language and environment + "moduleResolution": "bundler", + "module": "ESNext", + "noEmit": true, + "lib": ["ES2022"], + "target": "ES2022", + + // Skip type checking for node modules + "skipLibCheck": true + }, + "include": ["./**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/package.json b/package.json index 3e1447e3f..daa7d404a 100644 --- a/package.json +++ b/package.json @@ -17,13 +17,17 @@ "@biomejs/biome": "^1.8.1", "@changesets/changelog-github": "^0.4.8", "@changesets/cli": "^2.26.2", + "hono": "4.5.0", "lint-staged": "^15.1.0", "simple-git-hooks": "^2.9.0", "typescript": "5.0.4", "viem": "1.16.0" }, "lint-staged": { - "*.ts": ["biome format --no-errors-on-unmatched --write", "biome check"], + "*.ts": [ + "biome format --no-errors-on-unmatched --write", + "biome check --no-errors-on-unmatched" + ], "!(*.ts)": ["biome format --no-errors-on-unmatched --write"] }, "simple-git-hooks": { @@ -32,7 +36,6 @@ "packageManager": "pnpm@8.6.10", "pnpm": { "patchedDependencies": { - "graphql@16.8.1": "patches/graphql@16.8.1.patch", "detect-package-manager@3.0.1": "patches/detect-package-manager@3.0.1.patch" }, "peerDependencyRules": { diff --git a/packages/core/package.json b/packages/core/package.json index 7799620c9..7ee14e36b 100644 --- 
a/packages/core/package.json +++ b/packages/core/package.json @@ -33,6 +33,7 @@ "typecheck": "tsc --noEmit" }, "peerDependencies": { + "hono": ">=4.5", "typescript": ">=5.0.4", "viem": ">=1.16" }, @@ -56,17 +57,15 @@ "dataloader": "^2.2.2", "detect-package-manager": "^3.0.1", "dotenv": "^16.3.1", + "drizzle-orm": "^0.31.2", "emittery": "^1.0.1", "ethereum-bloom-filters": "^1.0.10", "glob": "^10.3.10", "graphql": "^16.8.1", - "graphql-type-json": "^0.3.2", "graphql-yoga": "^5.3.0", - "hono": "^4.4.2", "http-terminator": "^3.2.0", "ink": "^4.4.1", "kysely": "^0.26.3", - "magic-string": "^0.30.5", "p-queue": "^7.4.1", "pg": "^8.11.3", "pg-connection-string": "^2.6.2", diff --git a/packages/core/src/_test/e2e/erc20/src/api/index.ts b/packages/core/src/_test/e2e/erc20/src/api/index.ts new file mode 100644 index 000000000..5cf910996 --- /dev/null +++ b/packages/core/src/_test/e2e/erc20/src/api/index.ts @@ -0,0 +1,11 @@ +// @ts-ignore +import { ponder } from "@/generated"; +import { graphql } from "@/index.js"; + +// biome-ignore lint/suspicious/noRedeclare: :) +declare const ponder: import("@/index.js").Virtual.Registry< + typeof import("../../ponder.config.js").default, + typeof import("../../ponder.schema.js").default +>; + +ponder.use("/graphql", graphql()); diff --git a/packages/core/src/_test/e2e/factory/src/api/index.ts b/packages/core/src/_test/e2e/factory/src/api/index.ts new file mode 100644 index 000000000..5cf910996 --- /dev/null +++ b/packages/core/src/_test/e2e/factory/src/api/index.ts @@ -0,0 +1,11 @@ +// @ts-ignore +import { ponder } from "@/generated"; +import { graphql } from "@/index.js"; + +// biome-ignore lint/suspicious/noRedeclare: :) +declare const ponder: import("@/index.js").Virtual.Registry< + typeof import("../../ponder.config.js").default, + typeof import("../../ponder.schema.js").default +>; + +ponder.use("/graphql", graphql()); diff --git a/packages/core/src/_test/utils.ts b/packages/core/src/_test/utils.ts index 3d5b2a053..bcfdff32b 100644 
--- a/packages/core/src/_test/utils.ts +++ b/packages/core/src/_test/utils.ts @@ -696,8 +696,10 @@ export async function waitForIndexedBlock( const interval = setInterval(async () => { const response = await fetch(`http://localhost:${port}/status`); if (response.status === 200) { - const status = (await response.json()) as Status; - const statusBlockNumber = status[networkName]?.block?.number; + const status = (await response.json()) as Status | null; + const statusBlockNumber = status + ? status[networkName]?.block?.number + : undefined; if ( statusBlockNumber !== undefined && statusBlockNumber >= blockNumber diff --git a/packages/core/src/bin/commands/codegen.ts b/packages/core/src/bin/commands/codegen.ts index b61e2c7dd..ad49b0864 100644 --- a/packages/core/src/bin/commands/codegen.ts +++ b/packages/core/src/bin/commands/codegen.ts @@ -40,13 +40,13 @@ export async function codegen({ cliOptions }: { cliOptions: CliOptions }) { const shutdown = setupShutdown({ common, cleanup }); - const buildResult = await buildService.start({ watch: false }); + const { indexing } = await buildService.start({ watch: false }); - if (buildResult.status === "error") { + if (indexing.status === "error") { logger.error({ service: "process", msg: "Failed schema build", - error: buildResult.error, + error: indexing.error, }); await shutdown({ reason: "Failed schema build", code: 1 }); return; @@ -57,7 +57,7 @@ export async function codegen({ cliOptions }: { cliOptions: CliOptions }) { properties: { cli_command: "codegen" }, }); - runCodegen({ common, graphqlSchema: buildResult.build.graphqlSchema }); + runCodegen({ common, graphqlSchema: indexing.build.graphqlSchema }); logger.info({ service: "codegen", msg: "Wrote ponder-env.d.ts" }); logger.info({ service: "codegen", msg: "Wrote schema.graphql" }); diff --git a/packages/core/src/bin/commands/dev.ts b/packages/core/src/bin/commands/dev.ts index 46f0494d8..249bbcf98 100644 --- a/packages/core/src/bin/commands/dev.ts +++ 
b/packages/core/src/bin/commands/dev.ts @@ -1,6 +1,10 @@ import { existsSync } from "node:fs"; import path from "node:path"; -import { type BuildResult, createBuildService } from "@/build/index.js"; +import { + type ApiBuildResult, + type IndexingBuildResult, + createBuildService, +} from "@/build/index.js"; import { createLogger } from "@/common/logger.js"; import { MetricsService } from "@/common/metrics.js"; import { buildOptions } from "@/common/options.js"; @@ -9,6 +13,7 @@ import { UiService } from "@/ui/service.js"; import { createQueue } from "@ponder/common"; import type { CliOptions } from "../ponder.js"; import { run } from "../utils/run.js"; +import { runServer } from "../utils/runServer.js"; import { setupShutdown } from "../utils/shutdown.js"; export async function dev({ cliOptions }: { cliOptions: CliOptions }) { @@ -52,10 +57,12 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { const uiService = new UiService({ common }); - let cleanupReloadable = () => Promise.resolve(); + let indexingCleanupReloadable = () => Promise.resolve(); + let apiCleanupReloadable = () => Promise.resolve(); const cleanup = async () => { - await cleanupReloadable(); + await indexingCleanupReloadable(); + await apiCleanupReloadable(); await buildService.kill(); await telemetry.kill(); uiService.kill(); @@ -63,57 +70,85 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { const shutdown = setupShutdown({ common, cleanup }); - const buildQueue = createQueue({ + const indexingBuildQueue = createQueue({ initialStart: true, concurrency: 1, - worker: async (result: BuildResult) => { - await cleanupReloadable(); + worker: async (result: IndexingBuildResult) => { + await indexingCleanupReloadable(); if (result.status === "success") { uiService.reset(); metrics.resetMetrics(); - cleanupReloadable = await run({ + indexingCleanupReloadable = await run({ common, build: result.build, onFatalError: () => { shutdown({ reason: "Received fatal 
error", code: 1 }); }, onReloadableError: (error) => { - buildQueue.clear(); - buildQueue.add({ status: "error", error }); + indexingBuildQueue.clear(); + indexingBuildQueue.add({ status: "error", error }); }, }); } else { // This handles build failures and indexing errors on hot reload. uiService.setReloadableError(); - cleanupReloadable = () => Promise.resolve(); + indexingCleanupReloadable = () => Promise.resolve(); } }, }); - const initialResult = await buildService.start({ + const apiBuildQueue = createQueue({ + initialStart: true, + concurrency: 1, + worker: async (result: ApiBuildResult) => { + await apiCleanupReloadable(); + + if (result.status === "success") { + apiCleanupReloadable = await runServer({ + common, + build: result.build, + }); + } else { + // This handles build failures on hot reload. + uiService.setReloadableError(); + apiCleanupReloadable = () => Promise.resolve(); + } + }, + }); + + const { api, indexing } = await buildService.start({ watch: true, - onBuild: (buildResult) => { - buildQueue.clear(); - buildQueue.add(buildResult); + onIndexingBuild: (buildResult) => { + indexingBuildQueue.clear(); + indexingBuildQueue.add(buildResult); + }, + onApiBuild: (buildResult) => { + apiBuildQueue.clear(); + apiBuildQueue.add(buildResult); }, }); - if (initialResult.status === "error") { + if (indexing.status === "error" || api.status === "error") { await shutdown({ reason: "Failed intial build", code: 1 }); return cleanup; } telemetry.record({ name: "lifecycle:session_start", - properties: { cli_command: "dev", ...buildPayload(initialResult.build) }, + properties: { + cli_command: "dev", + ...buildPayload(indexing.build), + }, }); - buildQueue.add(initialResult); + indexingBuildQueue.add(indexing); + apiBuildQueue.add(api); return async () => { - buildQueue.pause(); + indexingBuildQueue.pause(); + apiBuildQueue.pause(); await cleanup(); }; } diff --git a/packages/core/src/bin/commands/serve.ts b/packages/core/src/bin/commands/serve.ts index 
5513bd8bf..baf8d2d8c 100644 --- a/packages/core/src/bin/commands/serve.ts +++ b/packages/core/src/bin/commands/serve.ts @@ -5,9 +5,6 @@ import { MetricsService } from "@/common/metrics.js"; import { buildOptions } from "@/common/options.js"; import { buildPayload, createTelemetry } from "@/common/telemetry.js"; import { PostgresDatabaseService } from "@/database/postgres/service.js"; -import type { NamespaceInfo } from "@/database/service.js"; -import { getMetadataStore } from "@/indexing-store/metadata.js"; -import { getReadonlyStore } from "@/indexing-store/readonly.js"; import { createServer } from "@/server/service.js"; import type { CliOptions } from "../ponder.js"; import { setupShutdown } from "../utils/shutdown.js"; @@ -53,22 +50,24 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) { const shutdown = setupShutdown({ common, cleanup }); - const initialResult = await buildService.start({ watch: false }); + const { api, indexing } = await buildService.start({ watch: false }); // Once we have the initial build, we can kill the build service. await buildService.kill(); - if (initialResult.status === "error") { + if (api.status === "error" || indexing.status === "error") { await shutdown({ reason: "Failed intial build", code: 1 }); return cleanup; } telemetry.record({ name: "lifecycle:session_start", - properties: { cli_command: "serve", ...buildPayload(initialResult.build) }, + properties: { + cli_command: "serve", + ...buildPayload(indexing.build), + }, }); - const { databaseConfig, optionsConfig, schema, graphqlSchema } = - initialResult.build; + const { databaseConfig, optionsConfig, schema } = api.build; common.options = { ...common.options, ...optionsConfig }; @@ -98,32 +97,13 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) { isReadonly: true, }); - const readonlyStore = getReadonlyStore({ - encoding: "postgres", - schema, - // Note: `ponder serve` serves data from the `publishSchema`. 
Also, it does - // not need the other fields in NamespaceInfo because it only uses findUnique - // and findMany. We should ultimately add a PublicStore interface for this. - namespaceInfo: { - userNamespace: databaseConfig.publishSchema, - } as unknown as NamespaceInfo, - db: database.readonlyDb, - common, - }); - - const metadataStore = getMetadataStore({ - encoding: database.kind, - namespaceInfo: { - userNamespace: databaseConfig.publishSchema, - } as unknown as NamespaceInfo, - db: database.readonlyDb, - }); - const server = await createServer({ - graphqlSchema, + app: api.build.app, + routes: api.build.routes, common, - readonlyStore, - metadataStore, + schema, + database, + dbNamespace: databaseConfig.publishSchema, }); cleanupReloadable = async () => { diff --git a/packages/core/src/bin/commands/start.ts b/packages/core/src/bin/commands/start.ts index 9b318fd22..110698255 100644 --- a/packages/core/src/bin/commands/start.ts +++ b/packages/core/src/bin/commands/start.ts @@ -6,6 +6,7 @@ import { buildOptions } from "@/common/options.js"; import { buildPayload, createTelemetry } from "@/common/telemetry.js"; import type { CliOptions } from "../ponder.js"; import { run } from "../utils/run.js"; +import { runServer } from "../utils/runServer.js"; import { setupShutdown } from "../utils/shutdown.js"; export async function start({ cliOptions }: { cliOptions: CliOptions }) { @@ -41,31 +42,36 @@ export async function start({ cliOptions }: { cliOptions: CliOptions }) { const buildService = await createBuildService({ common }); let cleanupReloadable = () => Promise.resolve(); + let cleanupReloadableServer = () => Promise.resolve(); const cleanup = async () => { await cleanupReloadable(); + await cleanupReloadableServer(); await telemetry.kill(); }; const shutdown = setupShutdown({ common, cleanup }); - const initialResult = await buildService.start({ watch: false }); + const { indexing, api } = await buildService.start({ watch: false }); // Once we have the initial 
build, we can kill the build service. await buildService.kill(); - if (initialResult.status === "error") { + if (indexing.status === "error" || api.status === "error") { await shutdown({ reason: "Failed intial build", code: 1 }); return cleanup; } telemetry.record({ name: "lifecycle:session_start", - properties: { cli_command: "start", ...buildPayload(initialResult.build) }, + properties: { + cli_command: "start", + ...buildPayload(indexing.build), + }, }); cleanupReloadable = await run({ common, - build: initialResult.build, + build: indexing.build, onFatalError: () => { shutdown({ reason: "Received fatal error", code: 1 }); }, @@ -74,5 +80,10 @@ export async function start({ cliOptions }: { cliOptions: CliOptions }) { }, }); + cleanupReloadableServer = await runServer({ + common, + build: api.build, + }); + return cleanup; } diff --git a/packages/core/src/bin/utils/run.test.ts b/packages/core/src/bin/utils/run.test.ts index 4d428819f..72e961421 100644 --- a/packages/core/src/bin/utils/run.test.ts +++ b/packages/core/src/bin/utils/run.test.ts @@ -3,10 +3,10 @@ import { setupCommon, setupIsolatedDatabase, } from "@/_test/setup.js"; -import type { Build } from "@/build/index.js"; +import type { IndexingBuild } from "@/build/index.js"; import * as codegen from "@/common/codegen.js"; +import { buildGraphQLSchema } from "@/graphql/buildGraphqlSchema.js"; import { createSchema } from "@/schema/schema.js"; -import { buildGraphqlSchema } from "@/server/graphql/buildGraphqlSchema.js"; import { promiseWithResolvers } from "@ponder/common"; import { beforeEach, expect, test, vi } from "vitest"; import { run } from "./run.js"; @@ -26,10 +26,10 @@ const schema = createSchema((p) => ({ }), })); -const graphqlSchema = buildGraphqlSchema(schema); +const graphqlSchema = buildGraphQLSchema(schema); test("run() kill", async (context) => { - const build: Build = { + const build: IndexingBuild = { buildId: "buildId", schema, graphqlSchema, @@ -59,7 +59,7 @@ test("run() setup", async 
(context) => { "Erc20:setup": vi.fn(), }; - const build: Build = { + const build: IndexingBuild = { buildId: "buildId", schema, graphqlSchema, @@ -88,7 +88,7 @@ test("run() setup error", async (context) => { }; const onReloadableErrorPromiseResolver = promiseWithResolvers(); - const build: Build = { + const build: IndexingBuild = { buildId: "buildId", schema, graphqlSchema, diff --git a/packages/core/src/bin/utils/run.ts b/packages/core/src/bin/utils/run.ts index f1fb889f1..14245e2ca 100644 --- a/packages/core/src/bin/utils/run.ts +++ b/packages/core/src/bin/utils/run.ts @@ -1,4 +1,4 @@ -import type { Build } from "@/build/index.js"; +import type { IndexingBuild } from "@/build/index.js"; import { runCodegen } from "@/common/codegen.js"; import type { Common } from "@/common/common.js"; import { PostgresDatabaseService } from "@/database/postgres/service.js"; @@ -10,7 +10,6 @@ import { getReadonlyStore } from "@/indexing-store/readonly.js"; import { getRealtimeStore } from "@/indexing-store/realtime.js"; import type { IndexingStore, Status } from "@/indexing-store/store.js"; import { createIndexingService } from "@/indexing/index.js"; -import { createServer } from "@/server/service.js"; import { PostgresSyncStore } from "@/sync-store/postgres/store.js"; import { SqliteSyncStore } from "@/sync-store/sqlite/store.js"; import type { SyncStore } from "@/sync-store/store.js"; @@ -41,7 +40,7 @@ export type RealtimeEvent = }; /** - * Starts the server, sync, and indexing services for the specified build. + * Starts the sync and indexing services for the specified build. 
*/ export async function run({ common, @@ -50,7 +49,7 @@ export async function run({ onReloadableError, }: { common: Common; - build: Build; + build: IndexingBuild; onFatalError: (error: Error) => void; onReloadableError: (error: Error) => void; }) { @@ -60,8 +59,8 @@ export async function run({ optionsConfig, networks, sources, - schema, graphqlSchema, + schema, indexingFunctions, } = build; @@ -108,22 +107,6 @@ export async function run({ namespaceInfo, db: database.indexingDb, }); - await metadataStore.setStatus(status); - - const readonlyStore = getReadonlyStore({ - encoding: database.kind, - schema, - namespaceInfo, - db: database.readonlyDb, - common, - }); - - const server = await createServer({ - common, - graphqlSchema, - readonlyStore, - metadataStore, - }); // This can be a long-running operation, so it's best to do it after // starting the server so the app can become responsive more quickly. @@ -215,6 +198,14 @@ export async function run({ }, }); + const readonlyStore = getReadonlyStore({ + encoding: database.kind, + schema, + namespaceInfo, + db: database.indexingDb, + common, + }); + const historicalStore = getHistoricalStore({ encoding: database.kind, schema, @@ -340,14 +331,12 @@ export async function run({ const startPromise = start(); return async () => { - const serverPromise = server.kill(); indexingService.kill(); await syncService.kill(); realtimeQueue.pause(); realtimeQueue.clear(); await realtimeQueue.onIdle(); await startPromise; - await serverPromise; await database.kill(); }; } diff --git a/packages/core/src/bin/utils/runServer.ts b/packages/core/src/bin/utils/runServer.ts new file mode 100644 index 000000000..b2f62f159 --- /dev/null +++ b/packages/core/src/bin/utils/runServer.ts @@ -0,0 +1,50 @@ +import type { ApiBuild } from "@/build/index.js"; +import type { Common } from "@/common/common.js"; +import { PostgresDatabaseService } from "@/database/postgres/service.js"; +import type { DatabaseService } from "@/database/service.js"; 
+import { SqliteDatabaseService } from "@/database/sqlite/service.js"; +import { createServer } from "@/server/service.js"; + +/** + * Starts the server for the specified build. + */ +export async function runServer({ + common, + build, +}: { + common: Common; + build: ApiBuild; +}) { + const { databaseConfig, optionsConfig, schema } = build; + + common.options = { ...common.options, ...optionsConfig }; + + let database: DatabaseService; + + if (databaseConfig.kind === "sqlite") { + const { directory } = databaseConfig; + database = new SqliteDatabaseService({ common, directory }); + } else { + const { poolConfig, schema: userNamespace, publishSchema } = databaseConfig; + database = new PostgresDatabaseService({ + common, + poolConfig, + userNamespace, + publishSchema, + }); + } + + const server = await createServer({ + app: build.app, + routes: build.routes, + common, + schema, + database, + dbNamespace: + databaseConfig.kind === "sqlite" ? "public" : databaseConfig.schema, + }); + + return async () => { + await server.kill(); + }; +} diff --git a/packages/core/src/build/index.ts b/packages/core/src/build/index.ts index 366a47b05..5bbcc2c03 100644 --- a/packages/core/src/build/index.ts +++ b/packages/core/src/build/index.ts @@ -1,6 +1,12 @@ import { type Extend, extend } from "@/utils/extend.js"; import { create, kill, start } from "./service.js"; -import type { Build, BuildResult, Service } from "./service.js"; +import type { + ApiBuild, + ApiBuildResult, + IndexingBuild, + IndexingBuildResult, + Service, +} from "./service.js"; const methods = { start, kill }; @@ -8,4 +14,4 @@ export const createBuildService = extend(create, methods); export type BuildService = Extend; -export type { BuildResult, Build }; +export type { IndexingBuild, IndexingBuildResult, ApiBuild, ApiBuildResult }; diff --git a/packages/core/src/build/plugin.test.ts b/packages/core/src/build/plugin.test.ts deleted file mode 100644 index 52664ad2e..000000000 --- 
a/packages/core/src/build/plugin.test.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { expect, test } from "vitest"; - -import { regex, replaceStateless, shim } from "./plugin.js"; - -test("regex matches basic", () => { - const code = `import { ponder } from "@/generated";\n`; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches multiline", () => { - const code = - 'import { ponder } from "@/generated";\n' + - 'ponder.on("PrimitiveManager:Swap", async ({ event, context }) => {\n'; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches import including types before", () => { - const code = 'import { type Context, ponder } from "@/generated";\n'; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches import includinga types after", () => { - const code = 'import { ponder, type Context } from "@/generated";\n'; - - expect(regex.test(code)).toBe(true); - expect(code.replace(regex, shim).includes(shim)).toBe(true); -}); - -test("regex matches import including newlines", () => { - const code = - "import {\n" + "ponder,\n" + "type Context,\n" + '} from "@/generated";\n'; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches no trailing semicolon", () => { - const code = `import { ponder } from "@/generated"`; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches no trailing single quote import", () => { - const code = `import { ponder } from '@/generated'`; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); 
-}); - -test("regex matches no trailing newline", () => { - const code = `import { ponder } from "@/generated";ponder.on("PrimitiveManager:Swap", async ({ event, context }) => {`; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); - -test("regex matches preceding import", () => { - const code = - `import {decodeEventLog} from "viem";\n` + - `import {ponder} from "@/generated";\n`; - - expect(regex.test(code)).toBe(true); - const s = replaceStateless(code); - expect(s.toString().includes(shim)).toBe(true); -}); diff --git a/packages/core/src/build/plugin.ts b/packages/core/src/build/plugin.ts index cca7bf2f5..7088e4a02 100644 --- a/packages/core/src/build/plugin.ts +++ b/packages/core/src/build/plugin.ts @@ -1,40 +1,41 @@ -import MagicString from "magic-string"; import type { Plugin } from "vite"; -export const regex = - /^import\s+\{[^}]*\bponder\b[^}]*\}\s+from\s+["']@\/generated["'];?.*$/gm; +const virtualModule = `import { Hono } from "hono"; -export const shim = `export let ponder = { +const ponderHono = { + routes: [], + get(...maybePathOrHandlers) { + this.routes.push({ method: "GET", pathOrHandlers: maybePathOrHandlers }); + return this; + }, + post(...maybePathOrHandlers) { + this.routes.push({ method: "POST", pathOrHandlers: maybePathOrHandlers }); + return this; + }, + use(...maybePathOrHandlers) { + this.routes.push({ method: "USE", pathOrHandlers: maybePathOrHandlers }); + return this; + }, +}; + +const ponder = { + ...ponderHono, + hono: new Hono(), fns: [], on(name, fn) { this.fns.push({ name, fn }); }, }; -`; -export function replaceStateless(code: string) { - const s = new MagicString(code); - // MagicString.replace calls regex.exec(), which increments `lastIndex` - // on a match. We have to set this back to zero to use the same regex - // multiple times. 
- regex.lastIndex = 0; - s.replace(regex, shim); - return s; -} +export { ponder }; +`; export const vitePluginPonder = (): Plugin => { return { name: "ponder", - transform: (code, id) => { - if (regex.test(code)) { - const s = replaceStateless(code); - const transformed = s.toString(); - const sourcemap = s.generateMap({ source: id }); - - return { code: transformed, map: sourcemap }; - } else { - return null; - } + load: (id) => { + if (id === "@/generated") return virtualModule; + return null; }, }; }; diff --git a/packages/core/src/build/service.ts b/packages/core/src/build/service.ts index eaa96dd22..aa02adec3 100644 --- a/packages/core/src/build/service.ts +++ b/packages/core/src/build/service.ts @@ -1,15 +1,18 @@ import { createHash } from "node:crypto"; -import { readFileSync } from "node:fs"; +import fs from "node:fs"; import path from "node:path"; import type { Common } from "@/common/common.js"; +import { BuildError } from "@/common/errors.js"; import type { Config, OptionsConfig } from "@/config/config.js"; import type { DatabaseConfig } from "@/config/database.js"; import type { Network } from "@/config/networks.js"; import type { EventSource } from "@/config/sources.js"; +import { buildGraphQLSchema } from "@/graphql/buildGraphqlSchema.js"; +import type { PonderRoutes } from "@/hono/index.js"; import type { Schema } from "@/schema/common.js"; -import { buildGraphqlSchema } from "@/server/graphql/buildGraphqlSchema.js"; import { glob } from "glob"; import type { GraphQLSchema } from "graphql"; +import type { Hono } from "hono"; import { type ViteDevServer, createServer } from "vite"; import { ViteNodeRunner } from "vite-node/client"; import { ViteNodeServer } from "vite-node/server"; @@ -30,7 +33,10 @@ const BUILD_ID_VERSION = "1"; export type Service = { // static common: Common; - srcRegex: RegExp; + indexingRegex: RegExp; + apiRegex: RegExp; + indexingPattern: string; + apiPattern: string; // vite viteDevServer: ViteDevServer; @@ -38,7 +44,7 @@ 
export type Service = { viteNodeRunner: ViteNodeRunner; }; -export type Build = { +type BaseBuild = { // Build ID for caching buildId: string; // Config @@ -49,22 +55,24 @@ export type Build = { // Schema schema: Schema; graphqlSchema: GraphQLSchema; - // Indexing functions +}; + +export type IndexingBuild = BaseBuild & { indexingFunctions: IndexingFunctions; }; -export type BuildResult = - | { status: "success"; build: Build } +export type ApiBuild = BaseBuild & { + app: Hono; + routes: PonderRoutes; +}; + +export type IndexingBuildResult = + | { status: "success"; build: IndexingBuild } | { status: "error"; error: Error }; -type RawBuild = { - config: { config: Config; contentHash: string }; - schema: { schema: Schema; contentHash: string }; - indexingFunctions: { - indexingFunctions: RawIndexingFunctions; - contentHash: string; - }; -}; +export type ApiBuildResult = + | { status: "success"; build: ApiBuild } + | { status: "error"; error: Error }; export const create = async ({ common, @@ -72,12 +80,28 @@ export const create = async ({ common: Common; }): Promise => { const escapeRegex = /[.*+?^${}()|[\]\\]/g; - const escapedSrcDir = common.options.srcDir + + const escapedIndexingDir = common.options.indexingDir // If on Windows, use a POSIX path for this regex. .replace(/\\/g, "/") // Escape special characters in the path. .replace(escapeRegex, "\\$&"); - const srcRegex = new RegExp(`^${escapedSrcDir}/.*\\.(ts|js)$`); + const indexingRegex = new RegExp(`^${escapedIndexingDir}/.*\\.(ts|js)$`); + + const escapedApiDir = common.options.apiDir + // If on Windows, use a POSIX path for this regex. + .replace(/\\/g, "/") + // Escape special characters in the path. 
+ .replace(escapeRegex, "\\$&"); + const apiRegex = new RegExp(`^${escapedApiDir}/.*\\.(ts|js)$`); + + const indexingPattern = path + .join(common.options.indexingDir, "**/*.{js,mjs,ts,mts}") + .replace(/\\/g, "/"); + + const apiPattern = path + .join(common.options.apiDir, "**/*.{js,mjs,ts,mts}") + .replace(/\\/g, "/"); const viteLogger = { warnedMessages: new Set(), @@ -129,7 +153,10 @@ export const create = async ({ return { common, - srcRegex, + indexingRegex, + apiRegex, + indexingPattern, + apiPattern, viteDevServer, viteNodeServer, viteNodeRunner, @@ -146,36 +173,54 @@ export const start = async ( buildService: Service, { watch, - onBuild, + onIndexingBuild, + onApiBuild, }: - | { watch: true; onBuild: (buildResult: BuildResult) => void } - | { watch: false; onBuild?: never }, -): Promise => { + | { + watch: true; + onIndexingBuild: (buildResult: IndexingBuildResult) => void; + onApiBuild: (buildResult: ApiBuildResult) => void; + } + | { watch: false; onIndexingBuild?: never; onApiBuild?: never }, +): Promise<{ indexing: IndexingBuildResult; api: ApiBuildResult }> => { const { common } = buildService; // Note: Don't run these in parallel. If there are circular imports in user code, // it's possible for ViteNodeRunner to return exports as undefined (a race condition). 
const configResult = await executeConfig(buildService); const schemaResult = await executeSchema(buildService); - const indexingFunctionsResult = await executeIndexingFunctions(buildService); + const indexingResult = await executeIndexingFunctions(buildService); + const apiResult = await executeApiRoutes(buildService); if (configResult.status === "error") { - return { status: "error", error: configResult.error }; + return { + indexing: { status: "error", error: configResult.error }, + api: { status: "error", error: configResult.error }, + }; } if (schemaResult.status === "error") { - return { status: "error", error: schemaResult.error }; + return { + indexing: { status: "error", error: schemaResult.error }, + api: { status: "error", error: schemaResult.error }, + }; } - if (indexingFunctionsResult.status === "error") { - return { status: "error", error: indexingFunctionsResult.error }; + if (indexingResult.status === "error") { + return { + indexing: { status: "error", error: indexingResult.error }, + api: { status: "error", error: indexingResult.error }, + }; + } + if (apiResult.status === "error") { + return { + indexing: { status: "error", error: apiResult.error }, + api: { status: "error", error: apiResult.error }, + }; } - const rawBuild: RawBuild = { - config: configResult, - schema: schemaResult, - indexingFunctions: indexingFunctionsResult, - }; - - const buildResult = await validateAndBuild(buildService, rawBuild); + let cachedConfigResult = configResult; + let cachedSchemaResult = schemaResult; + let cachedIndexingResult = indexingResult; + let cachedApiResult = apiResult; // If watch is false (`ponder start` or `ponder serve`), // don't register any event handlers on the watcher. 
@@ -224,13 +269,23 @@ export const start = async ( const hasSchemaUpdate = invalidated.includes( common.options.schemaFile.replace(/\\/g, "/"), ); - const hasIndexingFunctionUpdate = invalidated.some((file) => - buildService.srcRegex.test(file), + const hasIndexingUpdate = invalidated.some( + (file) => + buildService.indexingRegex.test(file) && + !buildService.apiRegex.test(file), + ); + const hasApiUpdate = invalidated.some((file) => + buildService.apiRegex.test(file), ); // This branch could trigger if you change a `note.txt` file within `src/`. // Note: We could probably do a better job filtering out files in `isFileIgnored`. - if (!hasConfigUpdate && !hasSchemaUpdate && !hasIndexingFunctionUpdate) { + if ( + !hasConfigUpdate && + !hasSchemaUpdate && + !hasIndexingUpdate && + !hasApiUpdate + ) { return; } @@ -244,38 +299,132 @@ export const start = async ( if (hasConfigUpdate) { const result = await executeConfig(buildService); if (result.status === "error") { - onBuild({ status: "error", error: result.error }); + onIndexingBuild({ status: "error", error: result.error }); return; } - rawBuild.config = result; + cachedConfigResult = result; } if (hasSchemaUpdate) { const result = await executeSchema(buildService); if (result.status === "error") { - onBuild({ status: "error", error: result.error }); + onIndexingBuild({ status: "error", error: result.error }); return; } - rawBuild.schema = result; + cachedSchemaResult = result; } - if (hasIndexingFunctionUpdate) { + if (hasIndexingUpdate) { + const files = glob.sync(buildService.indexingPattern, { + ignore: buildService.apiPattern, + }); + buildService.viteNodeRunner.moduleCache.invalidateDepTree(files); + buildService.viteNodeRunner.moduleCache.deleteByModuleId("@/generated"); + const result = await executeIndexingFunctions(buildService); if (result.status === "error") { - onBuild({ status: "error", error: result.error }); + onIndexingBuild({ status: "error", error: result.error }); + return; + } + 
cachedIndexingResult = result; + } + + if (hasApiUpdate) { + const files = glob.sync(buildService.apiPattern); + buildService.viteNodeRunner.moduleCache.invalidateDepTree(files); + buildService.viteNodeRunner.moduleCache.deleteByModuleId("@/generated"); + + const result = await executeApiRoutes(buildService); + if (result.status === "error") { + onApiBuild({ status: "error", error: result.error }); return; } - rawBuild.indexingFunctions = result; + cachedApiResult = result; } - const buildResult = await validateAndBuild(buildService, rawBuild); - onBuild(buildResult); + /** + * Build and validate updated indexing and api artifacts + * + * There are a few cases to handle: + * 1) config or schema is updated -> rebuild both api and indexing + * 2) indexing functions are updated -> rebuild indexing + * 3) api routes are updated -> rebuild api + * + * Note: the api build cannot be successful if the indexing + * build fails, this means that any indexing errors are always + * propagated to the api build. + */ + + const indexingBuildResult = await validateAndBuild( + buildService, + cachedConfigResult, + cachedSchemaResult, + cachedIndexingResult, + ); + if (indexingBuildResult.status === "error") { + onIndexingBuild(indexingBuildResult); + onApiBuild(indexingBuildResult); + return; + } + + // If schema or config is updated, rebuild both api and indexing + if (hasConfigUpdate || hasSchemaUpdate) { + onIndexingBuild(indexingBuildResult); + onApiBuild( + validateAndBuildApi( + buildService, + indexingBuildResult.build, + cachedApiResult, + ), + ); + } else { + if (hasIndexingUpdate) { + onIndexingBuild(indexingBuildResult); + } + + if (hasApiUpdate) { + onApiBuild( + validateAndBuildApi( + buildService, + indexingBuildResult.build, + cachedApiResult, + ), + ); + } + } }; buildService.viteDevServer.watcher.on("change", onFileChange); } - return buildResult; + // Build and validate initial indexing and server build.
+ // Note: the api build cannot be successful if the indexing + // build fails + + const initialBuildResult = await validateAndBuild( + buildService, + configResult, + schemaResult, + indexingResult, + ); + + if (initialBuildResult.status === "error") { + return { + indexing: { status: "error", error: initialBuildResult.error }, + api: { status: "error", error: initialBuildResult.error }, + }; + } + + const initialApiBuildResult = validateAndBuildApi( + buildService, + initialBuildResult.build, + apiResult, + ); + + return { + indexing: initialBuildResult, + api: initialApiBuildResult, + }; }; export const kill = async (buildService: Service): Promise => { @@ -354,11 +503,9 @@ const executeIndexingFunctions = async ( } | { status: "error"; error: Error } > => { - const pattern = path - .join(buildService.common.options.srcDir, "**/*.{js,mjs,ts,mts}") - .replace(/\\/g, "/"); - const files = glob.sync(pattern); - + const files = glob.sync(buildService.indexingPattern, { + ignore: buildService.apiPattern, + }); const executeResults = await Promise.all( files.map(async (file) => ({ ...(await executeFile(buildService, { file })), @@ -366,8 +513,6 @@ const executeIndexingFunctions = async ( })), ); - const indexingFunctions: RawIndexingFunctions = []; - for (const executeResult of executeResults) { if (executeResult.status === "error") { buildService.common.logger.error({ @@ -381,8 +526,6 @@ const executeIndexingFunctions = async ( return executeResult; } - - indexingFunctions.push(...(executeResult.exports?.ponder?.fns ?? [])); } // Note that we are only hashing the file contents, not the exports. 
This is @@ -390,7 +533,7 @@ const executeIndexingFunctions = async ( const hash = createHash("sha256"); for (const file of files) { try { - const contents = readFileSync(file, "utf-8"); + const contents = fs.readFileSync(file, "utf-8"); hash.update(contents); } catch (e) { buildService.common.logger.warn({ @@ -402,16 +545,69 @@ const executeIndexingFunctions = async ( } const contentHash = hash.digest("hex"); - return { status: "success", indexingFunctions, contentHash }; + const exports = await buildService.viteNodeRunner.executeId("@/generated"); + + return { + status: "success", + indexingFunctions: exports.ponder.fns, + contentHash, + }; +}; + +const executeApiRoutes = async ( + buildService: Service, +): Promise< + | { + status: "success"; + app: Hono; + routes: PonderRoutes; + } + | { status: "error"; error: Error } +> => { + const files = glob.sync(buildService.apiPattern); + const executeResults = await Promise.all( + files.map(async (file) => ({ + ...(await executeFile(buildService, { file })), + file, + })), + ); + + for (const executeResult of executeResults) { + if (executeResult.status === "error") { + buildService.common.logger.error({ + service: "build", + msg: `Error while executing '${path.relative( + buildService.common.options.rootDir, + executeResult.file, + )}':`, + error: executeResult.error, + }); + + return executeResult; + } + } + + const exports = await buildService.viteNodeRunner.executeId("@/generated"); + + return { + status: "success", + app: exports.ponder.hono, + routes: exports.ponder.routes, + }; }; const validateAndBuild = async ( { common }: Pick, - rawBuild: RawBuild, -): Promise => { + config: { config: Config; contentHash: string }, + schema: { schema: Schema; contentHash: string }, + indexingFunctions: { + indexingFunctions: RawIndexingFunctions; + contentHash: string; + }, +): Promise => { // Validate and build the schema const buildSchemaResult = safeBuildSchema({ - schema: rawBuild.schema.schema, + schema: schema.schema, 
}); if (buildSchemaResult.status === "error") { common.logger.error({ @@ -427,13 +623,13 @@ const validateAndBuild = async ( common.logger[log.level]({ service: "build", msg: log.msg }); } - const graphqlSchema = buildGraphqlSchema(buildSchemaResult.schema); + const graphqlSchema = buildGraphQLSchema(buildSchemaResult.schema); // Validates and build the config const buildConfigAndIndexingFunctionsResult = await safeBuildConfigAndIndexingFunctions({ - config: rawBuild.config.config, - rawIndexingFunctions: rawBuild.indexingFunctions.indexingFunctions, + config: config.config, + rawIndexingFunctions: indexingFunctions.indexingFunctions, options: common.options, }); if (buildConfigAndIndexingFunctionsResult.status === "error") { @@ -452,9 +648,9 @@ const validateAndBuild = async ( const buildId = createHash("sha256") .update(BUILD_ID_VERSION) - .update(rawBuild.config.contentHash) - .update(rawBuild.schema.contentHash) - .update(rawBuild.indexingFunctions.contentHash) + .update(config.contentHash) + .update(schema.contentHash) + .update(indexingFunctions.contentHash) .digest("hex") .slice(0, 10); @@ -479,6 +675,40 @@ const validateAndBuild = async ( }; }; +const validateAndBuildApi = ( + { common }: Pick, + baseBuild: BaseBuild, + api: { app: Hono; routes: PonderRoutes }, +): ApiBuildResult => { + for (const { + pathOrHandlers: [maybePathOrHandler], + } of api.routes) { + if (typeof maybePathOrHandler === "string") { + if ( + maybePathOrHandler === "/status" || + maybePathOrHandler === "/metrics" || + maybePathOrHandler === "/health" + ) { + const error = new BuildError( + `Validation failed: API route "${maybePathOrHandler}" is reserved for internal use.`, + ); + error.stack = undefined; + common.logger.error({ service: "build", msg: "Failed build", error }); + return { status: "error", error } as const; + } + } + } + + return { + status: "success", + build: { + ...baseBuild, + app: api.app, + routes: api.routes, + }, + }; +}; + const executeFile = async ( { common, 
viteNodeRunner }: Service, { file }: { file: string }, diff --git a/packages/core/src/common/codegen.ts b/packages/core/src/common/codegen.ts index a6c2ad396..59b5d6baa 100644 --- a/packages/core/src/common/codegen.ts +++ b/packages/core/src/common/codegen.ts @@ -1,8 +1,7 @@ import { mkdirSync, writeFileSync } from "node:fs"; import path from "node:path"; import type { Common } from "@/common/common.js"; -import type { GraphQLSchema } from "graphql"; -import { printSchema } from "graphql"; +import { type GraphQLSchema, printSchema } from "graphql"; export const ponderEnv = `// This file enables type checking and editor autocomplete for this Ponder project. // After upgrading, you may find that changes have been made to this file. @@ -27,6 +26,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/packages/core/src/common/errors.ts b/packages/core/src/common/errors.ts index c7689ba47..f0b13bc75 100644 --- a/packages/core/src/common/errors.ts +++ b/packages/core/src/common/errors.ts @@ -1,4 +1,4 @@ -class BaseError extends Error { +export class BaseError extends Error { override name = "BaseError"; meta: string[] = []; diff --git a/packages/core/src/common/options.ts b/packages/core/src/common/options.ts index 21b7f02c2..8fcb23123 100644 --- a/packages/core/src/common/options.ts +++ b/packages/core/src/common/options.ts @@ -9,7 +9,8 @@ export type Options = { configFile: string; schemaFile: string; rootDir: string; - srcDir: string; + indexingDir: string; + apiDir: string; generatedDir: string; ponderDir: string; logDir: string; @@ -18,10 +19,6 @@ export type Options = { hostname?: string; maxHealthcheckDuration: number; - graphqlMaxOperationTokens: number; - graphqlMaxOperationDepth: number; - graphqlMaxOperationAliases: number; - telemetryUrl: string; telemetryDisabled: boolean; telemetryConfigDir: string | 
undefined; @@ -82,7 +79,8 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => { rootDir, configFile: path.join(rootDir, cliOptions.config), schemaFile: path.join(rootDir, "ponder.schema.ts"), - srcDir: path.join(rootDir, "src"), + indexingDir: path.join(rootDir, "src"), + apiDir: path.join(rootDir, "src", "api"), generatedDir: path.join(rootDir, "generated"), ponderDir: path.join(rootDir, ".ponder"), logDir: path.join(rootDir, ".ponder", "logs"), @@ -91,12 +89,6 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => { hostname, maxHealthcheckDuration: 240, // 4 minutes - // Default limits are from Apollo: - // https://www.apollographql.com/blog/prevent-graph-misuse-with-operation-size-and-complexity-limits - graphqlMaxOperationTokens: 1000, - graphqlMaxOperationDepth: 100, - graphqlMaxOperationAliases: 30, - telemetryUrl: "https://ponder.sh/api/telemetry", telemetryDisabled: Boolean(process.env.PONDER_TELEMETRY_DISABLED), telemetryConfigDir: undefined, diff --git a/packages/core/src/common/telemetry.ts b/packages/core/src/common/telemetry.ts index 671e0389f..913f05141 100644 --- a/packages/core/src/common/telemetry.ts +++ b/packages/core/src/common/telemetry.ts @@ -4,7 +4,7 @@ import { existsSync, readFileSync } from "node:fs"; import os from "node:os"; import path from "node:path"; import { promisify } from "node:util"; -import type { Build } from "@/build/service.js"; +import type { IndexingBuild } from "@/build/service.js"; import type { Options } from "@/common/options.js"; import { getTables } from "@/schema/utils.js"; import { startClock } from "@/utils/timer.js"; @@ -269,7 +269,7 @@ function getPackageJson(rootDir: string) { } } -export function buildPayload(build: Build) { +export function buildPayload(build: IndexingBuild) { const table_count = Object.keys(getTables(build.schema)).length; const indexing_function_count = Object.values(build.indexingFunctions).reduce( (acc, f) => acc + Object.keys(f).length, 
diff --git a/packages/core/src/database/postgres/service.ts b/packages/core/src/database/postgres/service.ts index d00844bf5..352077f42 100644 --- a/packages/core/src/database/postgres/service.ts +++ b/packages/core/src/database/postgres/service.ts @@ -26,7 +26,7 @@ import { } from "@/utils/checkpoint.js"; import { formatEta } from "@/utils/format.js"; import { hash } from "@/utils/hash.js"; -import { createPool } from "@/utils/pg.js"; +import { createPool, createReadonlyPool } from "@/utils/pg.js"; import { wait } from "@/utils/wait.js"; import { type CreateTableBuilder, @@ -67,7 +67,7 @@ export class PostgresDatabaseService implements BaseDatabaseService { private internalPool: Pool; private syncPool: Pool; private indexingPool: Pool; - private readonlyPool: Pool; + readonlyPool: Pool; constructor({ common, @@ -108,7 +108,7 @@ export class PostgresDatabaseService implements BaseDatabaseService { application_name: `${userNamespace}_indexing`, max: indexingMax, }); - this.readonlyPool = createPool({ + this.readonlyPool = createReadonlyPool({ ...poolConfig, application_name: `${userNamespace}_readonly`, max: syncMax, diff --git a/packages/core/src/database/sqlite/service.ts b/packages/core/src/database/sqlite/service.ts index 02d0f178f..8fd82d0a9 100644 --- a/packages/core/src/database/sqlite/service.ts +++ b/packages/core/src/database/sqlite/service.ts @@ -24,7 +24,11 @@ import { } from "@/utils/checkpoint.js"; import { formatEta } from "@/utils/format.js"; import { hash } from "@/utils/hash.js"; -import { type SqliteDatabase, createSqliteDatabase } from "@/utils/sqlite.js"; +import { + type SqliteDatabase, + createReadonlySqliteDatabase, + createSqliteDatabase, +} from "@/utils/sqlite.js"; import { wait } from "@/utils/wait.js"; import { type CreateTableBuilder, @@ -53,6 +57,7 @@ export class SqliteDatabaseService implements BaseDatabaseService { private internalDatabase: SqliteDatabase; private syncDatabase: SqliteDatabase; + readonlyDatabase: SqliteDatabase; db: 
HeadlessKysely; readonlyDb: HeadlessKysely; @@ -91,6 +96,11 @@ export class SqliteDatabaseService implements BaseDatabaseService { `ATTACH DATABASE '${userDatabaseFile}' AS ${this.userNamespace}`, ); + this.readonlyDatabase = createReadonlySqliteDatabase(internalDatabaseFile); + this.readonlyDatabase.exec( + `ATTACH DATABASE '${userDatabaseFile}' AS ${this.userNamespace}`, + ); + this.db = new HeadlessKysely({ name: "internal", common, @@ -133,7 +143,7 @@ export class SqliteDatabaseService implements BaseDatabaseService { this.readonlyDb = new HeadlessKysely({ name: "readonly", common, - dialect: new SqliteDialect({ database: this.internalDatabase }), + dialect: new SqliteDialect({ database: this.readonlyDatabase }), log(event) { if (event.level === "query") { common.metrics.ponder_sqlite_query_total.inc({ diff --git a/packages/core/src/drizzle/bigint.ts b/packages/core/src/drizzle/bigint.ts new file mode 100644 index 000000000..0e499df67 --- /dev/null +++ b/packages/core/src/drizzle/bigint.ts @@ -0,0 +1,35 @@ +import { decodeToBigInt, encodeAsText } from "@/utils/encoding.js"; +import { entityKind } from "drizzle-orm"; +import { + type AnySQLiteTable, + SQLiteColumn, + SQLiteColumnBuilder, +} from "drizzle-orm/sqlite-core"; + +export class SQLiteBigintBuilder extends SQLiteColumnBuilder { + static readonly [entityKind]: string = "SQliteBigintBuilder"; + + constructor(columnName: string) { + super(columnName, "string", "SQLiteBigint"); + } + + build(table: AnySQLiteTable) { + return new SQLiteBigint(table, this.config); + } +} + +export class SQLiteBigint extends SQLiteColumn { + static readonly [entityKind]: string = "SQLiteBigint"; + + getSQLType(): string { + return "varchar(79)"; + } + + override mapFromDriverValue(value: string) { + return decodeToBigInt(value); + } + + override mapToDriverValue(value: bigint): string { + return encodeAsText(value as bigint); + } +} diff --git a/packages/core/src/drizzle/db.ts b/packages/core/src/drizzle/db.ts new file mode 
100644 index 000000000..76b87d550 --- /dev/null +++ b/packages/core/src/drizzle/db.ts @@ -0,0 +1,29 @@ +import type { Column, SQLWrapper, SelectedFields, Table } from "drizzle-orm"; +import type { SelectBuilder } from "./select.js"; + +export type DrizzleDb = { + select(): SelectBuilder; + select>( + fields: TSelection, + ): SelectBuilder; + select( + fields?: SelectedFields, + ): SelectBuilder | undefined, "async", void>; + /** + * Execute a raw read-only SQL query.. + * + * @example + * import { ponder } from "@/generated"; + * import { sql } from "@ponder/core"; + * + * ponder.get("/", async (c) => { + * const result = await c.db.execute(sql`SELECT * from "Accounts"`); + * return c.json(result); + * }); + * + * @see https://orm.drizzle.team/docs/sql + */ + execute: >( + query: SQLWrapper, + ) => Promise; +}; diff --git a/packages/core/src/drizzle/hex.ts b/packages/core/src/drizzle/hex.ts new file mode 100644 index 000000000..40708fd7a --- /dev/null +++ b/packages/core/src/drizzle/hex.ts @@ -0,0 +1,68 @@ +import { entityKind } from "drizzle-orm"; +import { + type AnyPgTable, + PgColumn, + PgColumnBuilder, +} from "drizzle-orm/pg-core"; +import { + type AnySQLiteTable, + SQLiteColumn, + SQLiteColumnBuilder, +} from "drizzle-orm/sqlite-core"; +import { bytesToHex, hexToBytes } from "viem"; + +export class PgHexBuilder extends PgColumnBuilder { + static readonly [entityKind]: string = "PgHexBuilder"; + + constructor(columnName: string) { + super(columnName, "buffer", "PgHex"); + } + + build(table: AnyPgTable) { + return new PgHex(table, this.config); + } +} + +export class PgHex extends PgColumn { + static readonly [entityKind]: string = "PgHex"; + + getSQLType(): string { + return "bytea"; + } + + override mapFromDriverValue(value: Buffer) { + return bytesToHex(value); + } + + override mapToDriverValue(value: `0x${string}`): Buffer { + return Buffer.from(hexToBytes(value)); + } +} + +export class SQLiteHexBuilder extends SQLiteColumnBuilder { + static readonly 
[entityKind]: string = "SQliteHexBuilder"; + + constructor(columnName: string) { + super(columnName, "buffer", "SQLiteHex"); + } + + build(table: AnySQLiteTable) { + return new SQLiteHex(table, this.config); + } +} + +export class SQLiteHex extends SQLiteColumn { + static readonly [entityKind]: string = "SQLiteHex"; + + getSQLType(): string { + return "blob"; + } + + override mapFromDriverValue(value: Buffer) { + return bytesToHex(value); + } + + override mapToDriverValue(value: `0x${string}`): Buffer { + return Buffer.from(hexToBytes(value)); + } +} diff --git a/packages/core/src/drizzle/json.ts b/packages/core/src/drizzle/json.ts new file mode 100644 index 000000000..1503ce56f --- /dev/null +++ b/packages/core/src/drizzle/json.ts @@ -0,0 +1,34 @@ +import { entityKind } from "drizzle-orm"; +import { + type AnySQLiteTable, + SQLiteColumn, + SQLiteColumnBuilder, +} from "drizzle-orm/sqlite-core"; + +export class SQLiteJsonBuilder extends SQLiteColumnBuilder { + static readonly [entityKind]: string = "SQliteJsonBuilder"; + + constructor(columnName: string) { + super(columnName, "json", "SQLiteJson"); + } + + build(table: AnySQLiteTable) { + return new SQLiteJson(table, this.config); + } +} + +export class SQLiteJson extends SQLiteColumn { + static readonly [entityKind]: string = "SQLiteJson"; + + getSQLType(): string { + return "jsonb"; + } + + override mapFromDriverValue(value: string) { + return JSON.parse(value); + } + + override mapToDriverValue(value: object): string { + return JSON.stringify(value); + } +} diff --git a/packages/core/src/drizzle/list.ts b/packages/core/src/drizzle/list.ts new file mode 100644 index 000000000..2139c96ce --- /dev/null +++ b/packages/core/src/drizzle/list.ts @@ -0,0 +1,100 @@ +import type { Scalar } from "@/schema/common.js"; +import { entityKind } from "drizzle-orm"; +import { + type AnyPgTable, + PgColumn, + PgColumnBuilder, +} from "drizzle-orm/pg-core"; +import { + type AnySQLiteTable, + SQLiteColumn, + SQLiteColumnBuilder, +} 
from "drizzle-orm/sqlite-core"; + +export class PgListBuilder extends PgColumnBuilder { + static readonly [entityKind]: string = "PgListBuilder"; + element: Scalar; + + constructor(columnName: string, element: Scalar) { + super(columnName, "string", "PgList"); + this.element = element; + } + + build(table: AnyPgTable) { + return new PgList(table, this.config, this.element); + } +} + +export class PgList extends PgColumn { + static readonly [entityKind]: string = "PgList"; + element: Scalar; + + constructor( + table: AnyPgTable, + config: PgListBuilder["config"], + element: Scalar, + ) { + super(table, config); + this.element = element; + } + + getSQLType(): string { + return "text"; + } + + override mapFromDriverValue(value: string) { + return this.element === "bigint" + ? JSON.parse(value).map(BigInt) + : JSON.parse(value); + } + + override mapToDriverValue(value: Array): string { + return this.element === "bigint" + ? JSON.stringify(value.map(String)) + : JSON.stringify(value); + } +} + +export class SQLiteListBuilder extends SQLiteColumnBuilder { + static readonly [entityKind]: string = "SQliteListBuilder"; + element: Scalar; + + constructor(columnName: string, element: Scalar) { + super(columnName, "string", "PgList"); + this.element = element; + } + + build(table: AnySQLiteTable) { + return new SQLiteList(table, this.config, this.element); + } +} + +export class SQLiteList extends SQLiteColumn { + static readonly [entityKind]: string = "SQLiteList"; + element: Scalar; + + constructor( + table: AnyPgTable, + config: SQLiteListBuilder["config"], + element: Scalar, + ) { + super(table, config); + this.element = element; + } + + getSQLType(): string { + return "text"; + } + + override mapFromDriverValue(value: string) { + return this.element === "bigint" + ? JSON.parse(value).map(BigInt) + : JSON.parse(value); + } + + override mapToDriverValue(value: Array): string { + return this.element === "bigint" + ? 
JSON.stringify(value.map(String)) + : JSON.stringify(value); + } +} diff --git a/packages/core/src/drizzle/runtime.test.ts b/packages/core/src/drizzle/runtime.test.ts new file mode 100644 index 000000000..3e8978947 --- /dev/null +++ b/packages/core/src/drizzle/runtime.test.ts @@ -0,0 +1,267 @@ +import { + setupCommon, + setupDatabaseServices, + setupIsolatedDatabase, +} from "@/_test/setup.js"; +import type { Context } from "@/hono/context.js"; +import type { HistoricalStore } from "@/indexing-store/store.js"; +import { createSchema } from "@/schema/schema.js"; +import { eq } from "drizzle-orm"; +import { beforeEach, expect, test } from "vitest"; +import type { DrizzleDb } from "./db.js"; +import { createDrizzleDb, createDrizzleTables } from "./runtime.js"; + +beforeEach(setupCommon); +beforeEach(setupIsolatedDatabase); + +test("runtime select", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ tableName: "table", id: "kyle" }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: "kyle" }); + + await cleanup(); +}); + +test("select hex", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.hex(), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ tableName: "table", id: "0x1" }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db 
= createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: "0x01" }); + + await cleanup(); +}); + +test("select bigint", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.bigint(), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ tableName: "table", id: 1n }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: 1n }); + + await cleanup(); +}); + +test("select json", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + json: p.json(), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ + tableName: "table", + id: "1", + data: { + json: { + prop: 52, + }, + }, + }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: "1", json: { prop: 52 } }); + + await cleanup(); +}); + +test("select enum", async (context) => { + const 
schema = createSchema((p) => ({ + en: p.createEnum(["hi", "low"]), + table: p.createTable({ + id: p.string(), + en: p.enum("en"), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ + tableName: "table", + id: "1", + data: { en: "hi" }, + }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: "1", en: "hi" }); + + await cleanup(); +}); + +test("select list", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + list: p.string().list(), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await setupDatabaseServices(context, { schema }); + + await indexingStore.create({ + tableName: "table", + id: "1", + data: { + list: ["big", "dog"], + }, + }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db.select().from(drizzleTables.table); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ id: "1", list: ["big", "dog"] }); + + await cleanup(); +}); + +test("select with join", async (context) => { + const schema = createSchema((p) => ({ + account: p.createTable({ + id: p.hex(), + name: p.string(), + age: p.int(), + }), + nft: p.createTable({ + id: p.bigint(), + owner: p.hex().references("account.id"), + }), + })); + + const { database, cleanup, indexingStore, namespaceInfo } = + await 
setupDatabaseServices(context, { schema }); + + await indexingStore.create({ + tableName: "account", + id: "0x1", + data: { + name: "kyle", + age: 52, + }, + }); + await indexingStore.create({ + tableName: "nft", + id: 10n, + data: { owner: "0x1" }, + }); + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const db = createDrizzleDb(database) as unknown as DrizzleDb; + + const drizzleTables = createDrizzleTables( + schema, + database, + namespaceInfo.userNamespace, + ) as Context["tables"]; + + const rows = await db + .select() + .from(drizzleTables.account) + .fullJoin( + drizzleTables.nft, + eq(drizzleTables.account.id, drizzleTables.nft.owner), + ); + + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ + account: { id: "0x01", name: "kyle", age: 52 }, + nft: { id: 10n, owner: "0x01" }, + }); + + await cleanup(); +}); diff --git a/packages/core/src/drizzle/runtime.ts b/packages/core/src/drizzle/runtime.ts new file mode 100644 index 000000000..33369d35a --- /dev/null +++ b/packages/core/src/drizzle/runtime.ts @@ -0,0 +1,256 @@ +import type { DatabaseService } from "@/database/service.js"; +import type { Scalar, Schema } from "@/schema/common.js"; +import { + isEnumColumn, + isJSONColumn, + isListColumn, + isMaterialColumn, + isOptionalColumn, + isReferenceColumn, + isScalarColumn, +} from "@/schema/utils.js"; +import { getTables } from "@/schema/utils.js"; +import { drizzle as drizzleSQLite } from "drizzle-orm/better-sqlite3"; +import { drizzle as drizzlePg } from "drizzle-orm/node-postgres"; +import { pgSchema, pgTable } from "drizzle-orm/pg-core"; +import { + doublePrecision as PgDoublePrecision, + integer as PgInteger, + jsonb as PgJsonb, + numeric as PgNumeric, + text as PgText, +} from "drizzle-orm/pg-core"; +import { + integer as SQLiteInteger, + real as SQLiteReal, + text as SQLiteText, + sqliteTable, +} from "drizzle-orm/sqlite-core"; +import { SQLiteBigintBuilder } from "./bigint.js"; +import { PgHexBuilder, 
SQLiteHexBuilder } from "./hex.js"; +import { SQLiteJsonBuilder } from "./json.js"; +import { PgListBuilder, SQLiteListBuilder } from "./list.js"; + +export const createDrizzleDb = (database: DatabaseService) => { + if (database.kind === "postgres") { + const drizzle = drizzlePg(database.readonlyPool); + return { + // @ts-ignore + select: (...args: any[]) => drizzle.select(...args), + execute: (query: any) => drizzle.execute(query), + }; + } else { + const drizzle = drizzleSQLite(database.readonlyDatabase); + return { + // @ts-ignore + select: (...args: any[]) => drizzle.select(...args), + execute: (query: any) => { + try { + try { + return drizzle.all(query); + } catch (e) { + const error = e as Error; + if ( + error.name === "SqliteError" && + error.message === + "This statement does not return data. Use run() instead" + ) { + return drizzle.run(query); + } else { + throw error; + } + } + } catch (e) { + const error = e as Error; + if (error.cause) throw error.cause; + throw error; + } + }, + }; + } +}; + +type SQLiteTable = Parameters[1]; +type PostgresTable = Parameters[1]; +type DrizzleTable = { [tableName: string]: any }; + +export const createDrizzleTables = ( + schema: Schema, + database: DatabaseService, + dbNamespace: string, +) => { + const drizzleTables: { [tableName: string]: DrizzleTable } = {}; + + for (const [tableName, { table }] of Object.entries(getTables(schema))) { + const drizzleColumns: DrizzleTable = {}; + + for (const [columnName, column] of Object.entries(table)) { + if (isMaterialColumn(column)) { + if (isJSONColumn(column)) { + drizzleColumns[columnName] = convertJsonColumn( + columnName, + database.kind, + ); + } else if (isEnumColumn(column)) { + if (isListColumn(column)) { + drizzleColumns[columnName] = convertListColumn( + columnName, + database.kind, + "string", + ); + } else { + drizzleColumns[columnName] = convertEnumColumn( + columnName, + database.kind, + ); + } + } else if (isScalarColumn(column) || isReferenceColumn(column)) { 
+ if (isListColumn(column)) { + drizzleColumns[columnName] = convertListColumn( + columnName, + database.kind, + column[" scalar"], + ); + } else { + switch (column[" scalar"]) { + case "string": + drizzleColumns[columnName] = convertStringColumn( + columnName, + database.kind, + ); + break; + + case "int": + drizzleColumns[columnName] = convertIntColumn( + columnName, + database.kind, + ); + break; + + case "boolean": + drizzleColumns[columnName] = convertBooleanColumn( + columnName, + database.kind, + ); + break; + + case "float": + drizzleColumns[columnName] = convertFloatColumn( + columnName, + database.kind, + ); + break; + + case "hex": + drizzleColumns[columnName] = convertHexColumn( + columnName, + database.kind, + ); + break; + + case "bigint": + drizzleColumns[columnName] = convertBigintColumn( + columnName, + database.kind, + ); + break; + } + } + + // apply column constraints + if (columnName === "id") { + drizzleColumns[columnName] = + drizzleColumns[columnName]!.primaryKey(); + } else if (isOptionalColumn(column) === false) { + drizzleColumns[columnName] = drizzleColumns[columnName]!.notNull(); + } + } + } + } + + if (database.kind === "postgres") { + // Note: this is to avoid an error thrown by drizzle when + // setting schema to "public". + if (dbNamespace === "public") { + drizzleTables[tableName] = pgTable( + tableName, + drizzleColumns as PostgresTable, + ); + } else { + drizzleTables[tableName] = pgSchema(dbNamespace).table( + tableName, + drizzleColumns as PostgresTable, + ); + } + } else { + drizzleTables[tableName] = sqliteTable( + tableName, + drizzleColumns as SQLiteTable, + ); + } + } + + return drizzleTables; +}; + +const convertStringColumn = ( + columnName: string, + kind: "sqlite" | "postgres", +) => { + return kind === "sqlite" ? SQLiteText(columnName) : PgText(columnName); +}; + +const convertIntColumn = (columnName: string, kind: "sqlite" | "postgres") => { + return kind === "sqlite" ? 
SQLiteInteger(columnName) : PgInteger(columnName); +}; + +const convertFloatColumn = ( + columnName: string, + kind: "sqlite" | "postgres", +) => { + return kind === "sqlite" + ? SQLiteReal(columnName) + : PgDoublePrecision(columnName); +}; + +const convertBooleanColumn = ( + columnName: string, + kind: "sqlite" | "postgres", +) => { + return kind === "sqlite" ? SQLiteInteger(columnName) : PgInteger(columnName); +}; + +const convertHexColumn = (columnName: string, kind: "sqlite" | "postgres") => { + return kind === "sqlite" + ? new SQLiteHexBuilder(columnName) + : new PgHexBuilder(columnName); +}; + +const convertBigintColumn = ( + columnName: string, + kind: "sqlite" | "postgres", +) => { + return kind === "sqlite" + ? new SQLiteBigintBuilder(columnName) + : PgNumeric(columnName, { precision: 78 }); +}; + +const convertListColumn = ( + columnName: string, + kind: "sqlite" | "postgres", + element: Scalar, +) => { + return kind === "sqlite" + ? new SQLiteListBuilder(columnName, element) + : new PgListBuilder(columnName, element); +}; + +const convertJsonColumn = (columnName: string, kind: "sqlite" | "postgres") => { + return kind === "sqlite" + ? new SQLiteJsonBuilder(columnName) + : PgJsonb(columnName); +}; + +const convertEnumColumn = (columnName: string, kind: "sqlite" | "postgres") => { + return kind === "sqlite" ? 
SQLiteText(columnName) : PgText(columnName); +}; diff --git a/packages/core/src/drizzle/select.ts b/packages/core/src/drizzle/select.ts new file mode 100644 index 000000000..dc6a592d2 --- /dev/null +++ b/packages/core/src/drizzle/select.ts @@ -0,0 +1,709 @@ +import type { + Assume, + Column, + MakeColumnConfig, + QueryPromise, + SelectedFields, + SelectedFieldsOrdered, + Subquery, + Table, + TableConfig, + UpdateTableConfig, + ValidateShape, + entityKind, +} from "drizzle-orm"; +import { TypedQueryBuilder } from "drizzle-orm/query-builders/query-builder"; +import type { + AddAliasToSelection, + AppendToNullabilityMap, + AppendToResult, + BuildSubquerySelection, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, + SetOperator, +} from "drizzle-orm/query-builders/select.types"; +import type { + ColumnsSelection, + Placeholder, + Query, + SQL, + View, +} from "drizzle-orm/sql"; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L54 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L50 + */ +export type SelectBuilder< + TSelection extends SelectedFields | undefined, + TResultType extends "sync" | "async", + TRunResult, + TBuilderMode extends "db" | "qb" = "db", +> = { + from: ( + source: TFrom, + ) => CreateSelectFromBuilderMode< + TBuilderMode, + GetSelectTableName, + TResultType, + TRunResult, + TSelection extends undefined ? GetSelectTableSelection : TSelection, + TSelection extends undefined ? 
"single" : "partial" + >; +}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L126 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L130 + */ +export abstract class SelectQueryBuilderBase< + THKT extends SelectHKTBase, + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record< + string, + JoinNullability + > = TTableName extends string ? Record : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult< + TSelection, + TSelectMode, + TNullabilityMap + >[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection< + TSelection, + TNullabilityMap + >, +> extends TypedQueryBuilder { + declare [entityKind]: string; + declare _: { + readonly hkt: THKT; + readonly tableName: TTableName; + readonly resultType: TResultType; + readonly runResult: TRunResult; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; + + declare leftJoin: JoinFn; + declare rightJoin: JoinFn; + declare innerJoin: JoinFn; + declare fullJoin: JoinFn; + + private declare setOperator: >( + rightSelection: + | (( + setOperators: GetSetOperators, + ) => SetOperatorRightSelect) + | SetOperatorRightSelect, + ) => SelectWithout; + + declare union: typeof this.setOperator; + declare unionAll: typeof this.setOperator; + declare intersect: typeof this.setOperator; + declare intersectAll: typeof this.setOperator; + declare except: typeof this.setOperator; + declare exceptAll: typeof this.setOperator; + + declare where: ( + where: ((aliases: TSelection) => SQL | 
undefined) | SQL | undefined, + ) => SelectWithout; + + declare having: ( + having: + | ((aliases: this["_"]["selection"]) => SQL | undefined) + | SQL + | undefined, + ) => SelectWithout; + + declare groupBy: ( + ...columns: (Column | SQL)[] + ) => SelectWithout; + + declare orderBy: ( + ...columns: (Column | SQL)[] + ) => SelectWithout; + + declare limit: ( + limit: number | Placeholder, + ) => SelectWithout; + + declare offset: ( + offset: number | Placeholder, + ) => SelectWithout; + + declare toSQL: () => Query; + + declare as: ( + alias: TAlias, + ) => SubqueryWithSelection; + + declare $dynamic: () => SelectDynamic; +} + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L803 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L903 + */ +export type SelectBase< + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode = "single", + TNullabilityMap extends Record< + string, + JoinNullability + > = TTableName extends string ? 
Record : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection< + TSelection, + TNullabilityMap + >, +> = SelectQueryBuilderBase< + SelectHKT, + TTableName, + TResultType, + TRunResult, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + // @ts-ignore + TResult, + TSelectedFields +> & + QueryPromise; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L31 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30 + */ +export type SelectJoinConfig = { + on: SQL; + table: Table | Subquery | View | SQL; + alias: string | undefined; + joinType: JoinType; +}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L38 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30 + */ +export type BuildAliasTable< + tableOrView extends Table | View, + alias extends string, +> = tableOrView extends Table + ? TableWithColumns< + UpdateTableConfig< + tableOrView["_"]["config"], + { + name: alias; + columns: MapColumnsToTableAlias; + } + > + > + : tableOrView extends View + ? 
ViewWithSelection< + alias, + tableOrView["_"]["existing"], + MapColumnsToTableAlias + > + : never; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L52 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L52 + */ +export type SelectConfig = { + withList?: Subquery[]; + fields: Record; + fieldsFlat?: SelectedFieldsOrdered; + where?: SQL; + having?: SQL; + table: Table | Subquery | View | SQL; + limit?: number | Placeholder; + offset?: number | Placeholder; + joins?: SelectJoinConfig[]; + orderBy?: (Column | SQL | SQL.Aliased)[]; + groupBy?: (Column | SQL | SQL.Aliased)[]; + distinct?: boolean; + setOperators: { + rightSelect: TypedQueryBuilder; + type: SetOperator; + isAll: boolean; + orderBy?: (Column | SQL | SQL.Aliased)[]; + limit?: number | Placeholder; + offset?: number | Placeholder; + }[]; +}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L75 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L82 + */ +export type Join< + T extends AnySelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends Table | Subquery | View | SQL, + TJoinedName extends + GetSelectTableName = GetSelectTableName, +> = T extends any + ? SelectWithout< + SelectKind< + T["_"]["hkt"], + T["_"]["tableName"], + T["_"]["resultType"], + T["_"]["runResult"], + AppendToResult< + T["_"]["tableName"], + T["_"]["selection"], + TJoinedName, + TJoinedTable extends Table + ? TJoinedTable["_"]["columns"] + : TJoinedTable extends Subquery | View + ? Assume< + TJoinedTable["_"]["selectedFields"], + SelectedFields + > + : never, + T["_"]["selectMode"] + >, + T["_"]["selectMode"] extends "partial" + ? 
T["_"]["selectMode"] + : "multiple", + AppendToNullabilityMap< + T["_"]["nullabilityMap"], + TJoinedName, + TJoinType + >, + T["_"]["dynamic"], + T["_"]["excludedMethods"] + >, + TDynamic, + T["_"]["excludedMethods"] + > + : never; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L106 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L111 + */ +export type JoinFn< + T extends AnySelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, +> = < + TJoinedTable extends Table | Subquery | View | SQL, + TJoinedName extends + GetSelectTableName = GetSelectTableName, +>( + table: TJoinedTable, + on: ((aliases: T["_"]["selection"]) => SQL | undefined) | SQL | undefined, +) => Join; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/query-builders/select.types.ts#L75 + */ +type MapColumnsToTableAlias< + TColumns extends ColumnsSelection, + TAlias extends string, +> = { + [Key in keyof TColumns]: TColumns[Key] extends Column + ? 
Column["_"], TAlias>> + : TColumns[Key]; +} & {}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L124 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L146 + */ +export type SelectHKTBase = { + tableName: string | undefined; + resultType: "sync" | "async"; + runResult: unknown; + selection: unknown; + selectMode: SelectMode; + nullabilityMap: unknown; + dynamic: boolean; + excludedMethods: string; + result: unknown; + selectedFields: unknown; + _type: unknown; +}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L138 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L158 + */ +export type SelectKind< + T extends SelectHKTBase, + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record, + TDynamic extends boolean, + TExcludedMethods extends string, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> = (T & { + tableName: TTableName; + resultType: TResultType; + runResult: TRunResult; + selection: TSelection; + selectMode: TSelectMode; + nullabilityMap: TNullabilityMap; + dynamic: TDynamic; + excludedMethods: TExcludedMethods; + result: TResult; + selectedFields: TSelectedFields; +})["_type"]; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L163 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L179 + */ +export interface SelectQueryBuilderHKT extends SelectHKTBase { + _type: SelectQueryBuilderBase< + SelectQueryBuilderHKT, + this["tableName"], + this["resultType"], + this["runResult"], + 
Assume, + this["selectMode"], + Assume>, + this["dynamic"], + this["excludedMethods"], + Assume, + Assume + >; +} + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L179 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L193 + */ +export interface SelectHKT extends SelectHKTBase { + _type: SelectBase< + this["tableName"], + this["resultType"], + this["runResult"], + Assume, + this["selectMode"], + Assume>, + this["dynamic"], + this["excludedMethods"], + Assume, + Assume + >; +} + +export type SetOperatorExcludedMethods = + | "leftJoin" + | "rightJoin" + | "innerJoin" + | "fullJoin" + | "where" + | "having" + | "groupBy"; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L204 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L206 + */ +export type CreateSelectFromBuilderMode< + TBuilderMode extends "db" | "qb", + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, +> = TBuilderMode extends "db" + ? SelectBase + : SelectQueryBuilderBase< + SelectQueryBuilderHKT, + TTableName, + TResultType, + TRunResult, + TSelection, + TSelectMode + >; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L227 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L224 + */ +type SelectWithout< + T extends AnySelectQueryBuilder, + TDynamic extends boolean, + K extends keyof T & string, + TResetExcluded extends boolean = false, +> = TDynamic extends true + ? 
T + : Omit< + SelectKind< + T["_"]["hkt"], + T["_"]["tableName"], + T["_"]["resultType"], + T["_"]["runResult"], + T["_"]["selection"], + T["_"]["selectMode"], + T["_"]["nullabilityMap"], + TDynamic, + TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K, + T["_"]["result"], + T["_"]["selectedFields"] + >, + TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K + >; + +export type SelectDynamic = SelectKind< + T["_"]["hkt"], + T["_"]["tableName"], + T["_"]["resultType"], + T["_"]["runResult"], + T["_"]["selection"], + T["_"]["selectMode"], + T["_"]["nullabilityMap"], + true, + never, + T["_"]["result"], + T["_"]["selectedFields"] +>; + +export type AnySelectQueryBuilder = SelectQueryBuilderBase< + any, + any, + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export type AnySetOperatorInterface = SetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export interface SetOperatorInterface< + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode = "single", + TNullabilityMap extends Record< + string, + JoinNullability + > = TTableName extends string ? 
Record : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult< + TSelection, + TSelectMode, + TNullabilityMap + >[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection< + TSelection, + TNullabilityMap + >, +> { + _: { + readonly hkt: SelectHKTBase; + readonly tableName: TTableName; + readonly resultType: TResultType; + readonly runResult: TRunResult; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; +} + +export type SetOperatorWithResult = SetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + TResult, + any +>; + +export type SetOperatorRightSelect< + TValue extends SetOperatorWithResult, + TResult extends any[], +> = TValue extends SetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + infer TValueResult, + any +> + ? ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + > + : TValue; + +export type SetOperatorRestSelect< + TValue extends readonly SetOperatorWithResult[], + TResult extends any[], +> = TValue extends [infer First, ...infer Rest] + ? First extends SetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + infer TValueResult, + any + > + ? Rest extends AnySetOperatorInterface[] + ? 
[ + ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + >, + ...SetOperatorRestSelect, + ] + : ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder[] + > + : never + : TValue; + +export type CreateSetOperatorFn = < + TTableName extends string | undefined, + TResultType extends "sync" | "async", + TRunResult, + TSelection extends ColumnsSelection, + TValue extends SetOperatorWithResult, + TRest extends SetOperatorWithResult[], + TSelectMode extends SelectMode = "single", + TNullabilityMap extends Record< + string, + JoinNullability + > = TTableName extends string ? Record : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult< + TSelection, + TSelectMode, + TNullabilityMap + >[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection< + TSelection, + TNullabilityMap + >, +>( + leftSelect: SetOperatorInterface< + TTableName, + TResultType, + TRunResult, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + rightSelect: SetOperatorRightSelect, + ...restSelects: SetOperatorRestSelect +) => SelectWithout< + SelectBase< + TTableName, + TResultType, + TRunResult, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + false, + SetOperatorExcludedMethods, + true +>; + +export type GetSetOperators = { + union: CreateSetOperatorFn; + intersect: CreateSetOperatorFn; + except: CreateSetOperatorFn; + unionAll: CreateSetOperatorFn; +}; + +export type SubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = Subquery> & + AddAliasToSelection; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/table.ts#L49 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/table.ts#L43 + */ +export type TableWithColumns = Table & { + [key 
in keyof T["columns"]]: T["columns"][key]; +}; + +/** + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/view.ts#L154 + * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/view.ts#L305 + */ +export type ViewWithSelection< + TName extends string, + TExisting extends boolean, + TSelection extends ColumnsSelection, +> = View & TSelection; diff --git a/packages/core/src/drizzle/table.test-d.ts b/packages/core/src/drizzle/table.test-d.ts new file mode 100644 index 000000000..7c57393be --- /dev/null +++ b/packages/core/src/drizzle/table.test-d.ts @@ -0,0 +1,152 @@ +import { createSchema } from "@/index.js"; +import { eq } from "drizzle-orm"; +import type { Hex } from "viem"; +import { expectTypeOf, test } from "vitest"; +import type { DrizzleDb } from "./db.js"; +import type { DrizzleTable } from "./table.js"; + +test("select query promise", async () => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + name: p.int().optional(), + }), + })); + + const table = {} as DrizzleTable< + "table", + (typeof schema)["table"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb).select({ id: table.id }).from(table); + // ^? + + expectTypeOf<{ id: string }[]>(result); +}); + +test("select optional column", async () => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + name: p.int().optional(), + }), + })); + + const table = {} as DrizzleTable< + "table", + (typeof schema)["table"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb).select().from(table); + // ^? 
+ + expectTypeOf<{ id: string; name: number | null }[]>(result); +}); + +test("select enum", async () => { + const schema = createSchema((p) => ({ + e: p.createEnum(["yes", "no"]), + table: p.createTable({ + id: p.string(), + e: p.enum("e"), + }), + })); + + const table = {} as DrizzleTable< + "table", + (typeof schema)["table"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb).select().from(table); + // ^? + + expectTypeOf<{ id: string; e: "yes" | "no" }[]>(result); +}); + +test("select json", async () => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + json: p.json<{ a: number; b: string }>(), + }), + })); + + const table = {} as DrizzleTable< + "table", + (typeof schema)["table"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb).select().from(table); + // ^? + + expectTypeOf<{ id: string; json: { a: number; b: string } }[]>(result); +}); + +test("select list", async () => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + list: p.string().list(), + }), + })); + + const table = {} as DrizzleTable< + "table", + (typeof schema)["table"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb).select().from(table); + // ^? + + expectTypeOf<{ id: string; list: string[] }[]>(result); +}); + +test("select join", async () => { + const schema = createSchema((p) => ({ + account: p.createTable({ + id: p.hex(), + name: p.string(), + age: p.int(), + }), + nft: p.createTable({ + id: p.bigint(), + owner: p.hex().references("account.id"), + }), + })); + + const account = {} as DrizzleTable< + "account", + (typeof schema)["account"]["table"], + typeof schema + >; + const nft = {} as DrizzleTable< + "nft", + (typeof schema)["nft"]["table"], + typeof schema + >; + + const result = await ({} as DrizzleDb) + // ^? 
+ .select() + .from(account) + .fullJoin(nft, eq(account.id, nft.owner)); + + expectTypeOf< + { + account: { + id: Hex; + name: string; + age: number; + } | null; + nft: { + id: bigint; + owner: Hex; + } | null; + }[] + >(result); +}); diff --git a/packages/core/src/drizzle/table.ts b/packages/core/src/drizzle/table.ts new file mode 100644 index 000000000..6fe78fc6a --- /dev/null +++ b/packages/core/src/drizzle/table.ts @@ -0,0 +1,51 @@ +import type { + EnumColumn, + ExtractNonVirtualColumnNames, + JSONColumn, + Schema as PonderSchema, + Table as PonderTable, + ReferenceColumn, + ScalarColumn, +} from "@/schema/common.js"; +import type { InferColumnType } from "@/schema/infer.js"; +import type { BuildColumns, ColumnBuilderBase } from "drizzle-orm"; +import type { TableWithColumns } from "./select.js"; + +/** + * Performs type transformation between Ponder and Drizzle column representation. + * + * @returns TableWithColumns + */ +export type DrizzleTable< + tableName extends string, + table extends PonderTable, + schema extends PonderSchema, +> = TableWithColumns<{ + name: tableName; + schema: undefined; + columns: BuildColumns< + tableName, + { + [columnName in ExtractNonVirtualColumnNames]: ColumnBuilderBase<{ + name: columnName & string; + dataType: "custom"; + columnType: "ponder"; + data: InferColumnType; + driverParam: unknown; + enumValues: undefined; + notNull: (table[columnName] & + ( + | ScalarColumn + | ReferenceColumn + | EnumColumn + | JSONColumn + ))[" optional"] extends true + ? false + : true; + primaryKey: columnName extends "id" ? 
true : false; + }>; + }, + "common" + >; + dialect: "common"; +}>; diff --git a/packages/core/src/server/graphql/buildGraphqlSchema.test.ts b/packages/core/src/graphql/buildGraphqlSchema.test.ts similarity index 95% rename from packages/core/src/server/graphql/buildGraphqlSchema.test.ts rename to packages/core/src/graphql/buildGraphqlSchema.test.ts index 51ae22ca0..68dc6becd 100644 --- a/packages/core/src/server/graphql/buildGraphqlSchema.test.ts +++ b/packages/core/src/graphql/buildGraphqlSchema.test.ts @@ -9,7 +9,7 @@ import { createSchema } from "@/schema/schema.js"; import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; import { type GraphQLType, execute, parse } from "graphql"; import { beforeEach, expect, test } from "vitest"; -import { buildGraphqlSchema } from "./buildGraphqlSchema.js"; +import { buildGraphQLSchema } from "./buildGraphqlSchema.js"; import { buildLoaderCache } from "./buildLoaderCache.js"; beforeEach(setupCommon); @@ -50,7 +50,7 @@ test("scalar", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -118,7 +118,7 @@ test("scalar list", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -186,7 +186,7 @@ test("scalar optional", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -254,7 +254,7 @@ test("scalar optional list", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -312,7 +312,7 @@ test("json", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const 
document = parse(` query { @@ -361,7 +361,7 @@ test("enum", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -410,7 +410,7 @@ test("enum optional", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -468,7 +468,7 @@ test("enum list", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -526,7 +526,7 @@ test("enum optional list", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -580,7 +580,7 @@ test("one", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -651,7 +651,7 @@ test("many", async (context) => { id: "0", }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -732,7 +732,7 @@ test("many w/ filter", async (context) => { id: "0", }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -787,7 +787,7 @@ test("bigint id", async (context) => { id: 0n, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -829,7 +829,7 @@ test("hex id", async (context) => { id: "0x00", }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -873,7 +873,7 @@ test("filter string eq", async (context) => { await create("0", indexingStore); - const 
graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -935,7 +935,7 @@ test("filter string in", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1009,7 +1009,7 @@ test("filter string contains", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1083,7 +1083,7 @@ test("filter string starts with", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1157,7 +1157,7 @@ test("filter string not ends with", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1219,7 +1219,7 @@ test("filter int eq", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1293,7 +1293,7 @@ test("filter int gt", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1355,7 +1355,7 @@ test("filter int lte", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1417,7 +1417,7 @@ test("filter int in", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1479,7 +1479,7 @@ test("filter float eq", async 
(context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1553,7 +1553,7 @@ test("filter float gt", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1615,7 +1615,7 @@ test("filter float lte", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1677,7 +1677,7 @@ test("filter float in", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1739,7 +1739,7 @@ test("filter bigint eq", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1813,7 +1813,7 @@ test("filter bigint gt", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1875,7 +1875,7 @@ test("filter bigint lte", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1937,7 +1937,7 @@ test("filter bigint in", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -1999,7 +1999,7 @@ test("filer hex eq", async (context) => { await create("0", indexingStore); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = 
buildGraphQLSchema(schema); const document = parse(` query { @@ -2073,7 +2073,7 @@ test("filter hex gt", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2147,7 +2147,7 @@ test("filter string list eq", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2221,7 +2221,7 @@ test("filter string list has", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2286,7 +2286,7 @@ test("filter enum eq", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2341,7 +2341,7 @@ test("filter enum in", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2401,7 +2401,7 @@ test("filter ref eq", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2468,7 +2468,7 @@ test("filter ref in", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2554,7 +2554,7 @@ test("order int asc", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2638,7 +2638,7 @@ test("order bigint asc", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2722,7 +2722,7 @@ test("order 
bigint desc", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2780,7 +2780,7 @@ test("limit default", async (context) => { await create(String(i), indexingStore); } - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2831,7 +2831,7 @@ test("limit", async (context) => { await create(String(i), indexingStore); } - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2882,7 +2882,7 @@ test("limit error", async (context) => { // await create(String(i), indexingStore); // } - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { @@ -2936,7 +2936,7 @@ test("filter type has correct suffixes and types", () => { }), })); - const serverSchema = buildGraphqlSchema(s); + const serverSchema = buildGraphQLSchema(s); const typeMap = serverSchema.getTypeMap(); @@ -3048,7 +3048,7 @@ test("metadata", async (context) => { const metadataStore = getMetadataStore({ encoding: database.kind, - db: database.readonlyDb, + db: database.indexingDb, namespaceInfo, }); @@ -3062,7 +3062,7 @@ test("metadata", async (context) => { }, }); - const graphqlSchema = buildGraphqlSchema(schema); + const graphqlSchema = buildGraphQLSchema(schema); const document = parse(` query { diff --git a/packages/core/src/server/graphql/buildGraphqlSchema.ts b/packages/core/src/graphql/buildGraphqlSchema.ts similarity index 97% rename from packages/core/src/server/graphql/buildGraphqlSchema.ts rename to packages/core/src/graphql/buildGraphqlSchema.ts index 87978df62..1cc918e9b 100644 --- a/packages/core/src/server/graphql/buildGraphqlSchema.ts +++ b/packages/core/src/graphql/buildGraphqlSchema.ts @@ -22,7 +22,7 @@ export type Context = { 
metadataStore: MetadataStore; }; -export const buildGraphqlSchema = (schema: Schema): GraphQLSchema => { +export const buildGraphQLSchema = (schema: Schema): GraphQLSchema => { const queryFields: Record> = {}; const { enumTypes } = buildEnumTypes({ schema }); diff --git a/packages/core/src/server/graphql/buildLoaderCache.ts b/packages/core/src/graphql/buildLoaderCache.ts similarity index 100% rename from packages/core/src/server/graphql/buildLoaderCache.ts rename to packages/core/src/graphql/buildLoaderCache.ts diff --git a/packages/core/src/server/graphql/entity.ts b/packages/core/src/graphql/entity.ts similarity index 99% rename from packages/core/src/server/graphql/entity.ts rename to packages/core/src/graphql/entity.ts index 351779059..ef5697248 100644 --- a/packages/core/src/server/graphql/entity.ts +++ b/packages/core/src/graphql/entity.ts @@ -23,9 +23,9 @@ import { GraphQLObjectType, GraphQLString, } from "graphql"; -import { GraphQLJSON } from "graphql-type-json"; import type { Context, Parent } from "./buildGraphqlSchema.js"; import { buildWhereObject } from "./filter.js"; +import { GraphQLJSON } from "./graphQLJson.js"; import type { PluralResolver } from "./plural.js"; import { SCALARS } from "./scalar.js"; diff --git a/packages/core/src/server/graphql/enum.ts b/packages/core/src/graphql/enum.ts similarity index 100% rename from packages/core/src/server/graphql/enum.ts rename to packages/core/src/graphql/enum.ts diff --git a/packages/core/src/server/graphql/filter.test.ts b/packages/core/src/graphql/filter.test.ts similarity index 100% rename from packages/core/src/server/graphql/filter.test.ts rename to packages/core/src/graphql/filter.test.ts diff --git a/packages/core/src/server/graphql/filter.ts b/packages/core/src/graphql/filter.ts similarity index 100% rename from packages/core/src/server/graphql/filter.ts rename to packages/core/src/graphql/filter.ts diff --git a/packages/core/src/graphql/graphQLJson.ts b/packages/core/src/graphql/graphQLJson.ts 
new file mode 100644 index 000000000..083d559dc --- /dev/null +++ b/packages/core/src/graphql/graphQLJson.ts @@ -0,0 +1,63 @@ +import { + type GraphQLScalarLiteralParser, + GraphQLScalarType, + Kind, + type ObjectValueNode, + type ValueNode, + print, +} from "graphql"; + +// Modified from https://github.com/taion/graphql-type-json/blob/master/src/index.js + +export const GraphQLJSON = new GraphQLScalarType({ + name: "JSON", + description: + "The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).", + serialize: (x) => x, + parseValue: (x) => x, + parseLiteral: (ast, variables) => { + if (ast.kind !== Kind.OBJECT) { + throw new TypeError( + `JSONObject cannot represent non-object value: ${print(ast)}`, + ); + } + + return parseObject(ast, variables); + }, +}); + +const parseLiteral = ( + ast: ValueNode, + variables: Parameters[1], +): ReturnType> => { + switch (ast.kind) { + case Kind.STRING: + case Kind.BOOLEAN: + return ast.value; + case Kind.INT: + case Kind.FLOAT: + return Number.parseFloat(ast.value); + case Kind.OBJECT: + return parseObject(ast, variables); + case Kind.LIST: + return ast.values.map((n) => parseLiteral(n, variables)); + case Kind.NULL: + return null; + case Kind.VARIABLE: + return variables ? 
variables[ast.name.value] : undefined; + default: + throw new TypeError(`JSON cannot represent value: ${print(ast)}`); + } +}; + +const parseObject = ( + ast: ObjectValueNode, + variables: Parameters[1], +) => { + const value = Object.create(null); + ast.fields.forEach((field) => { + value[field.name.value] = parseLiteral(field.value, variables); + }); + + return value; +}; diff --git a/packages/core/src/graphql/index.test.ts b/packages/core/src/graphql/index.test.ts new file mode 100644 index 000000000..a6e80ea11 --- /dev/null +++ b/packages/core/src/graphql/index.test.ts @@ -0,0 +1,316 @@ +import { + setupCommon, + setupDatabaseServices, + setupIsolatedDatabase, +} from "@/_test/setup.js"; +import type { HistoricalStore, ReadonlyStore } from "@/indexing-store/store.js"; +import type { Schema } from "@/schema/common.js"; +import { createSchema } from "@/schema/schema.js"; +import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; +import { Hono } from "hono"; +import { createMiddleware } from "hono/factory"; +import { beforeEach, expect, test } from "vitest"; +import { graphql } from "./index.js"; + +beforeEach(setupCommon); +beforeEach(setupIsolatedDatabase); + +const contextMiddleware = (schema: Schema, readonlyStore: ReadonlyStore) => + createMiddleware(async (c, next) => { + c.set("readonlyStore", readonlyStore); + c.set("schema", schema); + await next(); + }); + +test("graphQLMiddleware serves request", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + string: p.string(), + int: p.int(), + float: p.float(), + boolean: p.boolean(), + hex: p.hex(), + bigint: p.bigint(), + }), + })); + + const { indexingStore, readonlyStore, cleanup } = await setupDatabaseServices( + context, + { schema }, + ); + + await indexingStore.create({ + tableName: "table", + encodedCheckpoint: encodeCheckpoint(zeroCheckpoint), + id: "0", + data: { + string: "0", + int: 0, + float: 0, + boolean: false, + hex: "0x0", 
+ bigint: 0n, + }, + }); + + await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); + + const app = new Hono() + .use(contextMiddleware(schema, readonlyStore)) + .use("/graphql", graphql()); + + const response = await app.request("/graphql", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + query: ` + query { + table(id: "0") { + id + string + int + float + boolean + hex + bigint + } + } + `, + }), + }); + + expect(response.status).toBe(200); + + expect(await response.json()).toMatchObject({ + data: { + table: { + id: "0", + string: "0", + int: 0, + float: 0, + boolean: false, + hex: "0x00", + bigint: "0", + }, + }, + }); + + await cleanup(); +}); + +test("graphQLMiddleware throws error when extra filter is applied", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ + id: p.string(), + string: p.string(), + int: p.int(), + float: p.float(), + boolean: p.boolean(), + hex: p.hex(), + bigint: p.bigint(), + }), + })); + + const { readonlyStore, cleanup } = await setupDatabaseServices(context, { + schema, + }); + + const app = new Hono() + .use(contextMiddleware(schema, readonlyStore)) + .use("/graphql", graphql()); + + const response = await app.request("/graphql", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + query: ` + { + table(id: "0", doesntExist: "kevin") { + id + string + int + float + boolean + hex + bigint + } + } + `, + }), + }); + + expect(response.status).toBe(200); + const body = await response.json(); + expect(body.errors[0].message).toBe( + 'Unknown argument "doesntExist" on field "Query.table".', + ); + + await cleanup(); +}); + +test("graphQLMiddleware throws error for token limit", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ id: p.string() }), + })); + + const { readonlyStore, cleanup } = await setupDatabaseServices(context, { + schema, + }); + + const app 
= new Hono() + .use(contextMiddleware(schema, readonlyStore)) + .use("/graphql", graphql({ maxOperationTokens: 3 })); + + const response = await app.request("/graphql", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + query: ` + { + __schema { + types { + fields { + type { + fields { + type { + description + } + } + } + } + } + } + } + `, + }), + }); + + expect(response.status).toBe(200); + const body = await response.json(); + expect(body.errors[0].message).toBe( + "Syntax Error: Token limit of 3 exceeded.", + ); + + await cleanup(); +}); + +test("graphQLMiddleware throws error for depth limit", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ id: p.string() }), + })); + + const { readonlyStore, cleanup } = await setupDatabaseServices(context, { + schema, + }); + + const app = new Hono() + .use(contextMiddleware(schema, readonlyStore)) + .use("/graphql", graphql({ maxOperationDepth: 5 })); + + const response = await app.request("/graphql", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + query: ` + { + __schema { + types { + fields { + type { + fields { + type { + description + } + } + } + } + } + } + } + `, + }), + }); + + expect(response.status).toBe(200); + const body = await response.json(); + expect(body.errors[0].message).toBe( + "Syntax Error: Query depth limit of 5 exceeded, found 7.", + ); + + await cleanup(); +}); + +test("graphQLMiddleware throws error for max aliases", async (context) => { + const schema = createSchema((p) => ({ + table: p.createTable({ id: p.string() }), + })); + + const { readonlyStore, cleanup } = await setupDatabaseServices(context, { + schema, + }); + + const app = new Hono() + .use(contextMiddleware(schema, readonlyStore)) + .use("/graphql", graphql({ maxOperationAliases: 2 })); + + const response = await app.request("/graphql", { + method: "POST", + headers: { "Content-Type": "application/json" 
// A GET request (no GraphQL payload) should serve the interactive GraphiQL
// web view instead of executing a query, so a bare request succeeds.
test("graphQLMiddleware interactive", async (context) => {
  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
    schema: {},
  });

  const app = new Hono()
    .use(contextMiddleware({}, readonlyStore))
    .use("/graphql", graphql({ maxOperationAliases: 2 }));

  // No method/body: Hono defaults to GET, which returns the GraphiQL page.
  const response = await app.request("/graphql");

  expect(response.status).toBe(200);

  await cleanup();
});
+ * + * - Docs: https://ponder.sh/docs/query/api-functions#register-graphql-middleware + * + * @example + * import { ponder } from "@/generated"; + * import { graphql } from "@ponder/core"; + * + * ponder.use("/graphql", graphql()); + * + */ +export const graphql = ( + { + maxOperationTokens = 1000, + maxOperationDepth = 100, + maxOperationAliases = 30, + }: { + maxOperationTokens?: number; + maxOperationDepth?: number; + maxOperationAliases?: number; + } = { + // Default limits are from Apollo: + // https://www.apollographql.com/blog/prevent-graph-misuse-with-operation-size-and-complexity-limit + maxOperationTokens: 1000, + maxOperationDepth: 100, + maxOperationAliases: 30, + }, +) => { + let yoga: YogaServerInstance | undefined = undefined; + + return createMiddleware(async (c) => { + if (c.req.method === "GET") { + return c.html(graphiQLHtml(c.req.path)); + } + + if (yoga === undefined) { + const readonlyStore = c.get("readonlyStore"); + const metadataStore = c.get("metadataStore"); + const schema = c.get("schema"); + const graphqlSchema = buildGraphQLSchema(schema); + + yoga = createYoga({ + schema: graphqlSchema, + context: () => { + const getLoader = buildLoaderCache({ store: readonlyStore }); + return { readonlyStore, metadataStore, getLoader }; + }, + graphqlEndpoint: c.req.path, + maskedErrors: process.env.NODE_ENV === "production", + logging: false, + graphiql: false, + parserAndValidationCache: false, + plugins: [ + maxTokensPlugin({ n: maxOperationTokens }), + maxDepthPlugin({ + n: maxOperationDepth, + ignoreIntrospection: false, + }), + maxAliasesPlugin({ + n: maxOperationAliases, + allowList: [], + }), + ], + }); + } + + const response = await yoga.handle(c.req.raw); + // TODO: Figure out why Yoga is returning 500 status codes for GraphQL errors. 
+ // @ts-expect-error + response.status = 200; + // @ts-expect-error + response.statusText = "OK"; + + return response; + }); +}; diff --git a/packages/core/src/server/graphql/metadata.ts b/packages/core/src/graphql/metadata.ts similarity index 77% rename from packages/core/src/server/graphql/metadata.ts rename to packages/core/src/graphql/metadata.ts index c6ba3e8d2..cdf527dbd 100644 --- a/packages/core/src/server/graphql/metadata.ts +++ b/packages/core/src/graphql/metadata.ts @@ -1,5 +1,5 @@ import { GraphQLObjectType } from "graphql"; -import { GraphQLJSON } from "graphql-type-json"; +import { GraphQLJSON } from "./graphQLJson.js"; export const metadataEntity = new GraphQLObjectType({ name: "_meta", diff --git a/packages/core/src/server/graphql/plural.ts b/packages/core/src/graphql/plural.ts similarity index 100% rename from packages/core/src/server/graphql/plural.ts rename to packages/core/src/graphql/plural.ts diff --git a/packages/core/src/server/graphql/scalar.ts b/packages/core/src/graphql/scalar.ts similarity index 100% rename from packages/core/src/server/graphql/scalar.ts rename to packages/core/src/graphql/scalar.ts diff --git a/packages/core/src/server/graphql/singular.ts b/packages/core/src/graphql/singular.ts similarity index 100% rename from packages/core/src/server/graphql/singular.ts rename to packages/core/src/graphql/singular.ts diff --git a/packages/core/src/hono/context.ts b/packages/core/src/hono/context.ts new file mode 100644 index 000000000..067b2dfc7 --- /dev/null +++ b/packages/core/src/hono/context.ts @@ -0,0 +1,52 @@ +import type { Schema } from "@/schema/common.js"; +import type { ApiContext } from "@/types/api.js"; +import type { Env, Context as HonoContext, Input } from "hono"; + +export type Context< + schema extends Schema = Schema, + path extends string = string, + input extends Input = {}, +> = ApiContext & { + /** + * Hono request object. 
+ * + * @see https://hono.dev/docs/api/context#req + */ + req: HonoContext["req"]; + /** + * Hono response object. + * + * @see https://hono.dev/docs/api/context#res + */ + res: HonoContext["req"]; + /** + * Return the HTTP response. + * + * @see https://hono.dev/docs/api/context#body + */ + body: HonoContext["body"]; + /** + * Render text as `Content-Type:text/plain`. + * + * @see https://hono.dev/docs/api/context#text + */ + text: HonoContext["text"]; + /** + * Render JSON as `Content-Type:application/json`. + * + * @see https://hono.dev/docs/api/context#json + */ + json: HonoContext["json"]; + /** + * Hono redirect. + * + * @see https://hono.dev/docs/api/context#redirect + */ + redirect: HonoContext["redirect"]; +}; + +export type MiddlewareContext< + schema extends Schema = Schema, + path extends string = string, + input extends Input = {}, +> = ApiContext & HonoContext; diff --git a/packages/core/src/hono/handler.ts b/packages/core/src/hono/handler.ts new file mode 100644 index 000000000..725a95764 --- /dev/null +++ b/packages/core/src/hono/handler.ts @@ -0,0 +1,760 @@ +import type { Schema } from "@/schema/common.js"; +import type { ApiRegistry } from "@/types/api.js"; +import type { BlankInput, HandlerResponse, Input, Next } from "hono/types"; +import type { Context, MiddlewareContext } from "./context.js"; + +export type Handler< + schema extends Schema = Schema, + path extends string = any, + input extends Input = BlankInput, + response extends HandlerResponse = any, +> = (c: Context) => response; + +export type MiddlewareHandler< + schema extends Schema = Schema, + path extends string = string, + input extends Input = {}, +> = ( + c: MiddlewareContext, + next: Next, +) => Promise; + +type BasePath = "/"; + +export type HandlerInterface = { + // app.get(handler) + < + path extends string = BasePath, + input extends Input = BlankInput, + response extends HandlerResponse = any, + >( + handler: Handler, + ): ApiRegistry; + + // app.get(handler x2) + < + path 
extends string = BasePath, + input extends Input = BlankInput, + input2 extends Input = input, + response extends HandlerResponse = any, + >( + ...handlers: [ + Handler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + >( + path: path, + handler: Handler, + ): ApiRegistry; + + // app.get(handler x 3) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x2) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 4) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x3) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 5) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & 
input4, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x4) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 6) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x5) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 7) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & 
input2 & input3 & input4 & input5 & input6, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x6) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 8) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x7) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & 
input4 & input5 & input6, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 9) + < + path extends string = BasePath, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + input9 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x8) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(handler x 10) + < + path extends string = BasePath, + response extends 
HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + input9 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8, + input10 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8 & + input9, + >( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x9) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + input9 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(path, handler x10) + < + path extends string, + response extends HandlerResponse = any, + input extends Input = 
BlankInput, + input2 extends Input = input, + input3 extends Input = input & input2, + input4 extends Input = input & input2 & input3, + input5 extends Input = input & input2 & input3 & input4, + input6 extends Input = input & input2 & input3 & input4 & input5, + input7 extends Input = input & input2 & input3 & input4 & input5 & input6, + input8 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7, + input9 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8, + input10 extends Input = input & + input2 & + input3 & + input4 & + input5 & + input6 & + input7 & + input8 & + input9, + >( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + Handler, + ] + ): ApiRegistry; + + // app.get(...handlers[]) + < + path extends string = BasePath, + input extends Input = BlankInput, + response extends HandlerResponse = any, + >( + ...handlers: Handler[] + ): ApiRegistry; + + // app.get(path, ...handlers[]) + < + path extends string, + input extends Input = BlankInput, + response extends HandlerResponse = any, + >( + path: path, + ...handlers: Handler[] + ): ApiRegistry; + + // app.get(path) + (path: path): ApiRegistry; +}; + +export interface MiddlewareHandlerInterface { + //// app.use(...handlers[]) + (...handlers: MiddlewareHandler[]): ApiRegistry; + + // app.use(handler) + (handler: MiddlewareHandler): ApiRegistry; + + // app.use(handler x2) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler) + ( + path: path, + handler: MiddlewareHandler, + ): ApiRegistry; + + // app.use(handler x3) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x2) + ( + path: path, + ...handlers: [ + 
MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x4) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x3) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x5) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x4) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x6) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x5) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x7) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x6) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x8) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x7) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] 
+ ): ApiRegistry; + + // app.use(handler x9) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x8) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.use(handler x10) + ( + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + // app.get(path, handler x9) + ( + path: path, + ...handlers: [ + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + MiddlewareHandler, + ] + ): ApiRegistry; + + //// app.use(path, ...handlers[]) + ( + path: path, + ...handlers: MiddlewareHandler[] + ): ApiRegistry; +} diff --git a/packages/core/src/hono/index.test.ts b/packages/core/src/hono/index.test.ts new file mode 100644 index 000000000..021caeed2 --- /dev/null +++ b/packages/core/src/hono/index.test.ts @@ -0,0 +1,102 @@ +import { Hono } from "hono"; +import { expect, test } from "vitest"; +import type { Handler } from "./handler.js"; +import { type PonderRoutes, applyHonoRoutes } from "./index.js"; + +type MockPonderHono = { + routes: PonderRoutes; + get: ( + maybePathOrHandler: string | Handler, + ...handlers: Handler[] + ) => MockPonderHono; + post: ( + maybePathOrHandler: string | Handler, + ...handlers: Handler[] + ) => MockPonderHono; + use: ( + maybePathOrHandler: string | Handler, + ...handlers: Handler[] + ) => MockPonderHono; +}; + +const getMockPonderHono = (): MockPonderHono => 
// Minimal stand-in for the Ponder-flavored Hono builder: it only records
// registered routes (method + original argument list) so `applyHonoRoutes`
// can be exercised against a real Hono instance.
const getMockPonderHono = (): MockPonderHono => ({
  routes: [],
  get(..._handlers) {
    this.routes.push({ method: "GET", pathOrHandlers: _handlers });
    return this;
  },
  post(..._handlers) {
    this.routes.push({ method: "POST", pathOrHandlers: _handlers });
    return this;
  },
  use(..._handlers) {
    this.routes.push({ method: "USE", pathOrHandlers: _handlers });
    return this;
  },
});

// A GET handler registered without a path should be served at the root path.
test("get() w/o path", async () => {
  const ponderHono = getMockPonderHono().get((c) => {
    return c.text("hi");
  });

  const hono = applyHonoRoutes(new Hono(), ponderHono.routes);

  const response = await hono.request("");
  expect(await response.text()).toBe("hi");
});

// A GET handler registered with an explicit path should be served there.
test("get() w/ path", async () => {
  const ponderHono = getMockPonderHono().get("/hi", (c) => {
    return c.text("hi");
  });

  const hono = applyHonoRoutes(new Hono(), ponderHono.routes);

  const response = await hono.request("/hi");
  expect(await response.text()).toBe("hi");
});

// Middleware handlers listed before the terminal handler should run first.
// NOTE(review): `next()` is intentionally not awaited here — TODO confirm
// this matches Hono's middleware contract for these tests.
test("get() w/ middlware", async () => {
  const ponderHono = getMockPonderHono().get(
    "/hi",
    // @ts-ignore
    (c, next) => {
      next();
    },
    (c) => {
      return c.text("hi");
    },
  );

  const hono = applyHonoRoutes(new Hono(), ponderHono.routes);

  const response = await hono.request("/hi");
  expect(await response.text()).toBe("hi");
});

// use() without a path applies the middleware to every route ("*").
test("use() w/o path", async () => {
  // @ts-ignore
  const ponderHono = getMockPonderHono().use((c, next) => {
    next();
    return c.text("hi");
  });

  const hono = applyHonoRoutes(new Hono(), ponderHono.routes);

  const response = await hono.request("");
  expect(await response.text()).toBe("hi");
});

// use() with a path scopes the middleware to that path.
test("use() w/ path", async () => {
  // @ts-ignore
  const ponderHono = getMockPonderHono().use("/hi", (c, next) => {
    next();
    return c.text("hi");
  });

  const hono = applyHonoRoutes(new Hono(), ponderHono.routes);

  const response = await hono.request("/hi");
  expect(await response.text()).toBe("hi");
});
/**
 * A route registration recorded by the Ponder Hono builder: the HTTP method
 * ("USE" marks middleware) plus the original argument list, which may begin
 * with a path string followed by one or more handlers.
 */
export type PonderRoutes = {
  method: "GET" | "POST" | "USE";
  pathOrHandlers: [
    maybePathOrHandler: string | Handler | MiddlewareHandler,
    ...handlers: (Handler | MiddlewareHandler)[],
  ];
}[];

/**
 * Registers collected Ponder routes on a raw Hono instance, wrapping every
 * handler so that the keys of `customContext` are copied onto the per-request
 * Hono context before the handler runs.
 *
 * NOTE: relies on Hono's private `addRoute` method (see the inline links to
 * hono-base.ts), so it is sensitive to Hono internals.
 *
 * @returns The same `hono` instance, for chaining.
 */
export const applyHonoRoutes = (
  hono: Hono,
  routes: PonderRoutes,
  customContext?: object,
) => {
  // add custom properties to hono context
  const addCustomContext =
    (handler: Handler | MiddlewareHandler) => (c: any, next: any) => {
      for (const key of Object.keys(customContext ?? {})) {
        // @ts-ignore
        c[key] = customContext![key];
      }

      return handler(c, next);
    };

  for (const {
    method,
    pathOrHandlers: [maybePathOrHandler, ...handlers],
  } of routes) {
    // Default path when the first argument is a handler rather than a string.
    let path = "/";
    if (method === "GET" || method === "POST") {
      // register collected "GET" or "POST" path + handlers to the underlying hono instance
      // from https://github.com/honojs/hono/blob/main/src/hono-base.ts#L125-L142
      if (typeof maybePathOrHandler === "string") {
        path = maybePathOrHandler;
      } else {
        // First argument is already a handler; register it at "/".
        // @ts-expect-error access private property
        hono.addRoute(method, path, addCustomContext(maybePathOrHandler));
      }

      for (const handler of handlers) {
        if (typeof handler !== "string") {
          // @ts-expect-error access private property
          hono.addRoute(method, path, addCustomContext(handler));
        }
      }
    } else {
      // register collected middleware to the underlying hono instance
      // from: https://github.com/honojs/hono/blob/main/src/hono-base.ts#L158-L169
      if (typeof maybePathOrHandler === "string") {
        path = maybePathOrHandler;
      } else {
        // No path given: middleware applies to every route.
        path = "*";
        handlers.unshift(maybePathOrHandler);
      }
      for (const handler of handlers) {
        // @ts-expect-error access private property
        hono.addRoute("ALL", path, addCustomContext(handler));
      }
    }
  }

  return hono;
};
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 49c3208bf..e198bd629 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -23,3 +23,38 @@ export type ContractConfig = Prettify; export type NetworkConfig = Prettify; export type BlockConfig = Prettify; export type DatabaseConfig = Prettify; + +export { graphql } from "@/graphql/index.js"; + +export { + sql, + eq, + gt, + gte, + lt, + lte, + ne, + isNull, + isNotNull, + inArray, + notInArray, + exists, + notExists, + between, + notBetween, + like, + notIlike, + not, + asc, + desc, + and, + or, + count, + countDistinct, + avg, + avgDistinct, + sum, + sumDistinct, + max, + min, +} from "drizzle-orm"; diff --git a/packages/core/src/indexing-store/metadata.ts b/packages/core/src/indexing-store/metadata.ts index d5ec764bd..cff041587 100644 --- a/packages/core/src/indexing-store/metadata.ts +++ b/packages/core/src/indexing-store/metadata.ts @@ -8,11 +8,11 @@ export const getMetadataStore = ({ db, }: { encoding: "sqlite" | "postgres"; - namespaceInfo: NamespaceInfo; + namespaceInfo: Pick; db: HeadlessKysely; }): MetadataStore => ({ getStatus: async () => { - return db.wrap({ method: "_ponder_meta.getLatest()" }, async () => { + return db.wrap({ method: "_ponder_meta.getStatus()" }, async () => { const metadata = await db .withSchema(namespaceInfo.userNamespace) .selectFrom("_ponder_meta") @@ -28,7 +28,7 @@ export const getMetadataStore = ({ }); }, setStatus: (status: Status) => { - return db.wrap({ method: "_ponder_meta.setLatest()" }, async () => { + return db.wrap({ method: "_ponder_meta.setStatus()" }, async () => { await db .withSchema(namespaceInfo.userNamespace) .insertInto("_ponder_meta") diff --git a/packages/core/src/indexing-store/readonly.ts b/packages/core/src/indexing-store/readonly.ts index 790c8d1bf..19c50cae2 100644 --- a/packages/core/src/indexing-store/readonly.ts +++ b/packages/core/src/indexing-store/readonly.ts @@ -29,7 +29,7 @@ export const 
getReadonlyStore = ({ }: { encoding: "sqlite" | "postgres"; schema: Schema; - namespaceInfo: NamespaceInfo; + namespaceInfo: Pick; db: HeadlessKysely; common: Common; }): ReadonlyStore => ({ diff --git a/packages/core/src/indexing/addStackTrace.ts b/packages/core/src/indexing/addStackTrace.ts index fd10a80d9..84bed7a45 100644 --- a/packages/core/src/indexing/addStackTrace.ts +++ b/packages/core/src/indexing/addStackTrace.ts @@ -3,6 +3,10 @@ import type { Options } from "@/common/options.js"; import { codeFrameColumns } from "@babel/code-frame"; import { type StackFrame, parse as parseStackTrace } from "stacktrace-parser"; +// Note: this currently works for both indexing functions and api +// routes only because the api route dir is a subdir of the indexing function +// dir. + export const addStackTrace = (error: Error, options: Options) => { if (!error.stack) return; @@ -13,12 +17,12 @@ export const addStackTrace = (error: Error, options: Options) => { // Find first frame that occurred within user code. const firstUserFrameIndex = stackTrace.findIndex((frame) => - frame.file?.includes(options.srcDir), + frame.file?.includes(options.indexingDir), ); if (firstUserFrameIndex >= 0) { userStackTrace = stackTrace.filter((frame) => - frame.file?.includes(options.srcDir), + frame.file?.includes(options.indexingDir), ); const firstUserFrame = stackTrace[firstUserFrameIndex]; diff --git a/packages/core/src/schema/common.ts b/packages/core/src/schema/common.ts index 17422773a..f749720ef 100644 --- a/packages/core/src/schema/common.ts +++ b/packages/core/src/schema/common.ts @@ -189,6 +189,7 @@ export type ExtractNonVirtualColumnNames< | ReferenceColumn | ScalarColumn | EnumColumn + | JSONColumn ? 
columnNames : never : never; diff --git a/packages/core/src/server/error.ts b/packages/core/src/server/error.ts new file mode 100644 index 000000000..56976ec9c --- /dev/null +++ b/packages/core/src/server/error.ts @@ -0,0 +1,67 @@ +import type { Common } from "@/common/common.js"; +import type { BaseError } from "@/common/errors.js"; +import { addStackTrace } from "@/indexing/addStackTrace.js"; +import { prettyPrint } from "@/utils/print.js"; +import type { Context, HonoRequest } from "hono"; +import { html } from "hono/html"; + +export const onError = async (_error: Error, c: Context, common: Common) => { + const error = _error as BaseError; + + // Find the filename where the error occurred + const regex = /(\S+\.(?:js|ts|mjs|cjs)):\d+:\d+/; + const matches = error.stack?.match(regex); + const errorFile = (() => { + if (!matches?.[0]) return undefined; + const path = matches[0].trim(); + if (path.startsWith("(")) { + return path.slice(1); + } else if (path.startsWith("file://")) { + return path.slice(7); + } + return path; + })(); + + addStackTrace(error, common.options); + + error.meta = Array.isArray(error.meta) ? error.meta : []; + error.meta.push( + `Request:\n${prettyPrint({ + path: c.req.path, + method: c.req.method, + body: await tryExtractRequestBody(c.req), + })}`, + ); + + common.logger.warn({ + service: "server", + msg: `An error occurred while handling a '${c.req.method}' request to the route '${c.req.path}'`, + error, + }); + + // 500: Internal Server Error + return c.text( + `${error.name}: ${error.message} occurred in '${errorFile}' while handling a '${c.req.method}' request to the route '${c.req.path}'`, + 500, + ); +}; + +export const onNotFound = (c: Context) => { + return c.html( + html` +

Bad news!

+

The route "${c.req.path}" does not exist

`, + ); +}; + +const tryExtractRequestBody = async (request: HonoRequest) => { + try { + return await request.json(); + } catch { + try { + const text = await request.text(); + if (text !== "") return text; + } catch {} + } + return undefined; +}; diff --git a/packages/core/src/server/service.test.ts b/packages/core/src/server/service.test.ts index feec0cd91..fd807e25f 100644 --- a/packages/core/src/server/service.test.ts +++ b/packages/core/src/server/service.test.ts @@ -3,58 +3,55 @@ import { setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import type { - HistoricalStore, - MetadataStore, - ReadonlyStore, -} from "@/indexing-store/store.js"; -import { createSchema } from "@/schema/schema.js"; -import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; -import type { GraphQLSchema } from "graphql"; +import type { Context } from "@/hono/context.js"; +import { getMetadataStore } from "@/indexing-store/metadata.js"; +import { Hono } from "hono"; import { beforeEach, expect, test, vi } from "vitest"; -import { buildGraphqlSchema } from "./graphql/buildGraphqlSchema.js"; import { createServer } from "./service.js"; beforeEach(setupCommon); beforeEach(setupIsolatedDatabase); -const getMockMetadataStore = (ready: boolean) => - ({ - getStatus() { - return Promise.resolve({ mainnet: { ready } }); - }, - }) as unknown as MetadataStore; - test("port", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server1 = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); const server2 = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + 
routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); expect(server2.port).toBeGreaterThanOrEqual(server1.port + 1); await server1.kill(); await server2.kill(); + await cleanup(); }); test("not healthy", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, - common: { - ...context.common, - options: { ...context.common.options, maxHealthcheckDuration: 5 }, - }, - readonlyStore: {} as ReadonlyStore, - metadataStore: getMockMetadataStore(false), + common: context.common, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); const response = await server.hono.request("/health"); @@ -62,50 +59,73 @@ test("not healthy", async (context) => { expect(response.status).toBe(503); await server.kill(); + await cleanup(); }); test("healthy", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, - common: { - ...context.common, - options: { ...context.common.options, maxHealthcheckDuration: 0 }, - }, - readonlyStore: {} as ReadonlyStore, - metadataStore: getMockMetadataStore(true), + common: context.common, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); + await getMetadataStore({ + encoding: database.kind, + namespaceInfo, + db: database.indexingDb, + }).setStatus({}); + const response = await server.hono.request("/health"); expect(response.status).toBe(200); await server.kill(); + await cleanup(); }); test("healthy PUT", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: { ...context.common, options: { ...context.common.options, 
maxHealthcheckDuration: 0 }, }, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); - const response = await server.hono.request("/health", { method: "PUT" }); + const response = await server.hono.request("/health", { + method: "PUT", + }); expect(response.status).toBe(404); await server.kill(); + await cleanup(); }); test("metrics", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); const response = await server.hono.request("/metrics"); @@ -113,14 +133,20 @@ test("metrics", async (context) => { expect(response.status).toBe(200); await server.kill(); + await cleanup(); }); test("metrics error", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); const metricsSpy = vi.spyOn(context.common.metrics, "getMetrics"); @@ -131,384 +157,113 @@ test("metrics error", async (context) => { expect(response.status).toBe(500); await server.kill(); + await cleanup(); }); test("metrics PUT", async (context) => { - const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, - common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, - }); - - const response = await server.hono.request("/metrics", { method: "PUT" }); - - 
expect(response.status).toBe(404); - - await server.kill(); -}); - -test("graphql", async (context) => { - const schema = createSchema((p) => ({ - table: p.createTable({ - id: p.string(), - string: p.string(), - int: p.int(), - float: p.float(), - boolean: p.boolean(), - hex: p.hex(), - bigint: p.bigint(), - }), - })); - - const { indexingStore, readonlyStore, cleanup } = await setupDatabaseServices( - context, - { - schema, - }, - ); - - await indexingStore.create({ - tableName: "table", - encodedCheckpoint: encodeCheckpoint(zeroCheckpoint), - id: "0", - data: { - string: "0", - int: 0, - float: 0, - boolean: false, - hex: "0x0", - bigint: 0n, - }, - }); - - await (indexingStore as HistoricalStore).flush({ isFullFlush: true }); - - const graphqlSchema = buildGraphqlSchema(schema); + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); const server = await createServer({ - graphqlSchema: graphqlSchema, common: context.common, - readonlyStore: readonlyStore, - metadataStore: getMockMetadataStore(true), + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); - const response = await server.hono.request("/graphql", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - query: ` - query { - table(id: "0") { - id - string - int - float - boolean - hex - bigint - } - } - `, - }), + const response = await server.hono.request("/metrics", { + method: "PUT", }); - expect(response.status).toBe(200); - - expect(await response.json()).toMatchObject({ - data: { - table: { - id: "0", - string: "0", - int: 0, - float: 0, - boolean: false, - hex: "0x00", - bigint: "0", - }, - }, - }); - - await cleanup(); + expect(response.status).toBe(404); await server.kill(); + await cleanup(); }); -test("graphql extra filter", async (context) => { - const schema = createSchema((p) => ({ - table: p.createTable({ - id: p.string(), - string: p.string(), - int: p.int(), 
- float: p.float(), - boolean: p.boolean(), - hex: p.hex(), - bigint: p.bigint(), - }), - })); - - const { readonlyStore, cleanup } = await setupDatabaseServices(context, { - schema, - }); - - const graphqlSchema = buildGraphqlSchema(schema); +test("missing route", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); const server = await createServer({ - graphqlSchema: graphqlSchema, common: context.common, - readonlyStore: readonlyStore, - metadataStore: getMockMetadataStore(true), + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); - const response = await server.hono.request("/graphql", { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - query: ` - { - table(id: "0", doesntExist: "kevin") { - id - string - int - float - boolean - hex - bigint - } - } - `, - }), - }); - - expect(response.status).toBe(200); - const body = await response.json(); - expect(body.errors[0].message).toBe( - 'Unknown argument "doesntExist" on field "Query.table".', - ); + const response = await server.hono.request("/kevin"); - await cleanup(); + expect(response.status).toBe(404); await server.kill(); -}); - -test("graphql token limit error", async (context) => { - const schema = createSchema((p) => ({ - table: p.createTable({ id: p.string() }), - })); - - const { readonlyStore, cleanup } = await setupDatabaseServices(context, { - schema, - }); - - const graphqlSchema = buildGraphqlSchema(schema); - - const server = await createServer({ - graphqlSchema: graphqlSchema, - common: { - ...context.common, - options: { ...context.common.options, graphqlMaxOperationTokens: 3 }, - }, - readonlyStore: readonlyStore, - metadataStore: getMockMetadataStore(true), - }); - - const response = await server.hono.request("/graphql", { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - query: ` - { - __schema 
{ - types { - fields { - type { - fields { - type { - description - } - } - } - } - } - } - } - `, - }), - }); - - expect(response.status).toBe(200); - const body = await response.json(); - expect(body.errors[0].message).toBe( - "Syntax Error: Token limit of 3 exceeded.", - ); - await cleanup(); - - await server.kill(); }); -test("graphql depth limit error", async (context) => { - const schema = createSchema((p) => ({ - table: p.createTable({ id: p.string() }), - })); - - const { readonlyStore, cleanup } = await setupDatabaseServices(context, { - schema, - }); - - const graphqlSchema = buildGraphqlSchema(schema); +test("custom api route", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); const server = await createServer({ - graphqlSchema: graphqlSchema, - common: { - ...context.common, - options: { ...context.common.options, graphqlMaxOperationDepth: 5 }, - }, - readonlyStore: readonlyStore, - metadataStore: getMockMetadataStore(true), + common: context.common, + app: new Hono(), + routes: [ + { method: "GET", pathOrHandlers: ["/hi", (c: Context) => c.text("hi")] }, + ], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); - const response = await server.hono.request("/graphql", { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - query: ` - { - __schema { - types { - fields { - type { - fields { - type { - description - } - } - } - } - } - } - } - `, - }), - }); + const response = await server.hono.request("/hi"); expect(response.status).toBe(200); - const body = await response.json(); - expect(body.errors[0].message).toBe( - "Syntax Error: Query depth limit of 5 exceeded, found 7.", - ); - - await cleanup(); + expect(await response.text()).toBe("hi"); await server.kill(); -}); - -test("graphql max aliases error", async (context) => { - const schema = createSchema((p) => ({ - table: p.createTable({ id: p.string() }), - })); - - const { 
readonlyStore, cleanup } = await setupDatabaseServices(context, { - schema, - }); - - const graphqlSchema = buildGraphqlSchema(schema); - - const server = await createServer({ - graphqlSchema: graphqlSchema, - common: { - ...context.common, - options: { ...context.common.options, graphqlMaxOperationAliases: 2 }, - }, - readonlyStore: readonlyStore, - metadataStore: getMockMetadataStore(true), - }); - - const response = await server.hono.request("/graphql", { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - query: ` - { - __schema { - types { - fields { - type { - alias1: fields { - type { - description - } - } - alias2: fields { - type { - description - } - } - alias3: fields { - type { - description - } - } - } - } - } - } - } - `, - }), - }); - - expect(response.status).toBe(200); - const body = await response.json(); - expect(body.errors[0].message).toBe( - "Syntax Error: Aliases limit of 2 exceeded, found 3.", - ); - await cleanup(); - - await server.kill(); }); -test("graphql interactive", async (context) => { - const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, - common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, - }); - - const response = await server.hono.request("/graphql"); +test("custom hono route", async (context) => { + const { database, namespaceInfo, cleanup } = + await setupDatabaseServices(context); - expect(response.status).toBe(200); - - await server.kill(); -}); + const app = new Hono().get("/hi", (c) => c.text("hi")); -test("missing route", async (context) => { const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app, + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); - const response = await server.hono.request("/kevin"); + const response = await 
server.hono.request("/hi"); - expect(response.status).toBe(404); + expect(response.status).toBe(200); + expect(await response.text()).toBe("hi"); await server.kill(); + await cleanup(); }); // Note that this test doesn't work because the `hono.request` method doesn't actually // create a socket connection, it just calls the request handler function directly. test.skip("kill", async (context) => { + const { database, namespaceInfo } = await setupDatabaseServices(context); + const server = await createServer({ - graphqlSchema: {} as GraphQLSchema, common: context.common, - readonlyStore: {} as ReadonlyStore, - metadataStore: {} as MetadataStore, + app: new Hono(), + routes: [], + schema: {}, + database, + dbNamespace: namespaceInfo.userNamespace, }); await server.kill(); diff --git a/packages/core/src/server/service.ts b/packages/core/src/server/service.ts index 7fc0cef91..4e17264bf 100644 --- a/packages/core/src/server/service.ts +++ b/packages/core/src/server/service.ts @@ -1,47 +1,59 @@ import http from "node:http"; import type { Common } from "@/common/common.js"; -import type { MetadataStore, ReadonlyStore } from "@/indexing-store/store.js"; -import { graphiQLHtml } from "@/ui/graphiql.html.js"; +import type { DatabaseService } from "@/database/service.js"; +import { createDrizzleDb, createDrizzleTables } from "@/drizzle/runtime.js"; +import { graphql } from "@/graphql/index.js"; +import { type PonderRoutes, applyHonoRoutes } from "@/hono/index.js"; +import { getMetadataStore } from "@/indexing-store/metadata.js"; +import { getReadonlyStore } from "@/indexing-store/readonly.js"; +import type { Schema } from "@/schema/common.js"; import { startClock } from "@/utils/timer.js"; -import { maxAliasesPlugin } from "@escape.tech/graphql-armor-max-aliases"; -import { maxDepthPlugin } from "@escape.tech/graphql-armor-max-depth"; -import { maxTokensPlugin } from "@escape.tech/graphql-armor-max-tokens"; import { serve } from "@hono/node-server"; -import { GraphQLError, 
type GraphQLSchema } from "graphql"; -import { createYoga } from "graphql-yoga"; import { Hono } from "hono"; import { cors } from "hono/cors"; import { createMiddleware } from "hono/factory"; import { createHttpTerminator } from "http-terminator"; -import { - type GetLoader, - buildLoaderCache, -} from "./graphql/buildLoaderCache.js"; +import { onError } from "./error.js"; type Server = { - hono: Hono<{ Variables: { store: ReadonlyStore; getLoader: GetLoader } }>; + hono: Hono; port: number; kill: () => Promise; }; export async function createServer({ - graphqlSchema, - readonlyStore, - metadataStore, + app: userApp, + routes: userRoutes, common, + schema, + database, + dbNamespace, }: { - graphqlSchema: GraphQLSchema; - readonlyStore: ReadonlyStore; - metadataStore: MetadataStore; + app: Hono; + routes: PonderRoutes; common: Common; + schema: Schema; + database: DatabaseService; + dbNamespace: string; }): Promise { - const hono = new Hono<{ - Variables: { store: ReadonlyStore; getLoader: GetLoader }; - }>(); + // Create hono app - let port = common.options.port; const startTime = Date.now(); + const readonlyStore = getReadonlyStore({ + encoding: database.kind, + schema, + namespaceInfo: { userNamespace: dbNamespace }, + db: database.readonlyDb, + common, + }); + + const metadataStore = getMetadataStore({ + encoding: database.kind, + namespaceInfo: { userNamespace: dbNamespace }, + db: database.readonlyDb, + }); + const metricsMiddleware = createMiddleware(async (c, next) => { const commonLabels = { method: c.req.method, path: c.req.path }; common.metrics.ponder_http_server_active_requests.inc(commonLabels); @@ -78,40 +90,22 @@ export async function createServer({ } }); - const createGraphqlYoga = (path: string) => - createYoga({ - schema: graphqlSchema, - context: () => { - const getLoader = buildLoaderCache({ store: readonlyStore }); - return { getLoader, readonlyStore, metadataStore }; - }, - graphqlEndpoint: path, - maskedErrors: process.env.NODE_ENV === 
"production", - logging: false, - graphiql: false, - parserAndValidationCache: false, - plugins: [ - maxTokensPlugin({ n: common.options.graphqlMaxOperationTokens }), - maxDepthPlugin({ - n: common.options.graphqlMaxOperationDepth, - ignoreIntrospection: false, - }), - maxAliasesPlugin({ - n: common.options.graphqlMaxOperationAliases, - allowList: [], - }), - ], - }); - - const rootYoga = createGraphqlYoga("/"); - const rootGraphiql = graphiQLHtml("/"); - - const prodYoga = createGraphqlYoga("/graphql"); - const prodGraphiql = graphiQLHtml("/graphql"); + const db = createDrizzleDb(database); + const tables = createDrizzleTables(schema, database, dbNamespace); + + // context required for graphql middleware and hono middleware + const contextMiddleware = createMiddleware(async (c, next) => { + c.set("db", db); + c.set("tables", tables); + c.set("readonlyStore", readonlyStore); + c.set("metadataStore", metadataStore); + c.set("schema", schema); + await next(); + }); - hono - .use(cors()) + const hono = new Hono() .use(metricsMiddleware) + .use(cors()) .get("/metrics", async (c) => { try { const metrics = await common.metrics.getMetrics(); @@ -143,33 +137,38 @@ export async function createServer({ return c.text("Historical indexing is not complete.", 503); }) - // Renders GraphiQL - .get("/graphql", (c) => c.html(prodGraphiql)) - // Serves GraphQL POST requests following healthcheck rules - .post("/graphql", async (c) => { - const status = await metadataStore.getStatus(); - if ( - status === null || - Object.values(status).some(({ ready }) => ready === false) - ) { - return c.json( - { errors: [new GraphQLError("Historical indexing is not complete")] }, - 503, - ); - } - - return prodYoga.handle(c.req.raw); - }) - // Renders GraphiQL - .get("/", (c) => c.html(rootGraphiql)) - // Serves GraphQL POST requests regardless of health status, e.g. 
"dev UI" - .post("/", (c) => rootYoga.handle(c.req.raw)) .get("/status", async (c) => { const status = await metadataStore.getStatus(); return c.json(status); + }) + .use(contextMiddleware); + + if (userRoutes.length === 0 && userApp.routes.length === 0) { + // apply graphql middleware if no custom api exists + hono.use("/graphql", graphql()); + hono.use("/", graphql()); + } else { + // apply user routes to hono instance, registering a custom error handler + applyHonoRoutes(hono, userRoutes, { db, tables }).onError((error, c) => + onError(error, c, common), + ); + + common.logger.debug({ + service: "server", + msg: `Detected a custom server with routes: [${userRoutes + .map(({ pathOrHandlers: [maybePathOrHandler] }) => maybePathOrHandler) + .filter((maybePathOrHandler) => typeof maybePathOrHandler === "string") + .join(", ")}]`, }); + hono.route("/", userApp); + } + + // Create nodejs server + + let port = common.options.port; + const createServerWithNextAvailablePort: typeof http.createServer = ( ...args: any ) => { @@ -234,7 +233,6 @@ export async function createServer({ return { hono, port, - kill: () => terminator.terminate(), }; } diff --git a/packages/core/src/types/api.ts b/packages/core/src/types/api.ts new file mode 100644 index 000000000..49b77ce4d --- /dev/null +++ b/packages/core/src/types/api.ts @@ -0,0 +1,27 @@ +import type { DrizzleDb } from "@/drizzle/db.js"; +import type { DrizzleTable } from "@/drizzle/table.js"; +import type { + HandlerInterface, + MiddlewareHandlerInterface, +} from "@/hono/handler.js"; +import type { ExtractTableNames, Schema } from "@/schema/common.js"; +import type { Hono } from "hono"; + +export type ApiContext = { + db: DrizzleDb; + tables: { + [tableName in ExtractTableNames]: DrizzleTable< + tableName, + // @ts-ignore + schema[tableName]["table"], + schema + >; + }; +}; + +export type ApiRegistry = { + get: HandlerInterface; + post: HandlerInterface; + use: MiddlewareHandlerInterface; + hono: Hono<{ Variables: ApiContext 
}>; +}; diff --git a/packages/core/src/types/virtual.test-d.ts b/packages/core/src/types/virtual.test-d.ts index 27a4dbee1..21eabf114 100644 --- a/packages/core/src/types/virtual.test-d.ts +++ b/packages/core/src/types/virtual.test-d.ts @@ -1,4 +1,5 @@ import { createConfig } from "@/config/config.js"; +import type { DrizzleDb } from "@/drizzle/db.js"; import { createSchema, createTable } from "@/schema/schema.js"; import { http, type Abi, type Address, type Hex, parseAbiItem } from "viem"; import { assertType, test } from "vitest"; @@ -437,3 +438,9 @@ test("Registry", () => { context.contracts.c2; }); }); + +test("Drizzle", () => { + type a = Virtual.Drizzle; + + assertType({} as any as { db: DrizzleDb; tables: { table: any } }); +}); diff --git a/packages/core/src/types/virtual.ts b/packages/core/src/types/virtual.ts index e4f2631ed..6bea330eb 100644 --- a/packages/core/src/types/virtual.ts +++ b/packages/core/src/types/virtual.ts @@ -17,6 +17,7 @@ import type { TransactionReceipt, } from "@/types/eth.js"; import type { DatabaseModel } from "@/types/model.js"; +import type { ApiRegistry, ApiContext as _ApiContext } from "./api.js"; import type { Prettify } from "./utils.js"; export namespace Virtual { @@ -226,6 +227,8 @@ export namespace Virtual { }; }; + export type Drizzle = _ApiContext; + export type IndexingFunctionArgs< config extends Config, schema extends BuilderSchema, @@ -246,5 +249,5 @@ export namespace Virtual { }, ) => Promise | void, ) => void; - }; + } & ApiRegistry; } diff --git a/packages/core/src/utils/pg.ts b/packages/core/src/utils/pg.ts index 3e2ef72d5..2d8664d7b 100644 --- a/packages/core/src/utils/pg.ts +++ b/packages/core/src/utils/pg.ts @@ -44,6 +44,28 @@ pg.Client.prototype.query = function query( } }; +class ReadonlyClient extends pg.Client { + // @ts-expect-error + override connect( + callback: (err: Error) => void | undefined, + ): void | Promise { + if (callback) { + super.connect(() => { + this.query( + "SET SESSION CHARACTERISTICS 
AS TRANSACTION READ ONLY", + callback, + ); + }); + } else { + return super.connect().then(async () => { + await this.query( + "SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY", + ); + }); + } + } +} + export function createPool(config: PoolConfig) { return new pg.Pool({ // https://stackoverflow.com/questions/59155572/how-to-set-query-timeout-in-relation-to-statement-timeout @@ -51,3 +73,13 @@ export function createPool(config: PoolConfig) { ...config, }); } + +export function createReadonlyPool(config: PoolConfig) { + return new pg.Pool({ + // https://stackoverflow.com/questions/59155572/how-to-set-query-timeout-in-relation-to-statement-timeout + statement_timeout: 2 * 60 * 1000, // 2 minutes + // @ts-expect-error: The custom Client is an undocumented option. + Client: ReadonlyClient, + ...config, + }); +} diff --git a/packages/core/src/utils/sqlite.ts b/packages/core/src/utils/sqlite.ts index c1f48fb1d..7a9280f2e 100644 --- a/packages/core/src/utils/sqlite.ts +++ b/packages/core/src/utils/sqlite.ts @@ -75,3 +75,14 @@ export function createSqliteDatabase( database.pragma("journal_mode = WAL"); return database; } + +export function createReadonlySqliteDatabase( + file: string, + options?: BetterSqlite3.Options, +): SqliteDatabase { + ensureDirExists(file); + const database = new BetterSqlite3(file, { readonly: true, ...options }); + improveSqliteErrors(database); + database.pragma("journal_mode = WAL"); + return database; +} diff --git a/packages/create-ponder/src/index.ts b/packages/create-ponder/src/index.ts index e51174338..fc03bab3c 100644 --- a/packages/create-ponder/src/index.ts +++ b/packages/create-ponder/src/index.ts @@ -80,6 +80,11 @@ const templates = [ title: "Feature - Custom event filter", description: "A Ponder app using an event filter", }, + { + id: "feature-api-functions", + title: "Feature - Custom api functions", + description: "A Ponder app using a custom api functions", + }, { id: "feature-blocks", title: "Feature - Block filter", diff 
--git a/packages/create-ponder/templates/empty/package.json b/packages/create-ponder/templates/empty/package.json index 083cd2e68..0b6368412 100644 --- a/packages/create-ponder/templates/empty/package.json +++ b/packages/create-ponder/templates/empty/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "^0.0.95", + "hono": "^4.5.0", "viem": "^1.19.3" }, "devDependencies": { diff --git a/packages/create-ponder/templates/empty/ponder-env.d.ts b/packages/create-ponder/templates/empty/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/packages/create-ponder/templates/empty/ponder-env.d.ts +++ b/packages/create-ponder/templates/empty/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/packages/create-ponder/templates/etherscan/package.json b/packages/create-ponder/templates/etherscan/package.json index 94e7e0c44..c69bab9ea 100644 --- a/packages/create-ponder/templates/etherscan/package.json +++ b/packages/create-ponder/templates/etherscan/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "^0.0.95", + "hono": "^4.5.0", "viem": "^1.19.3" }, "devDependencies": { diff --git a/packages/create-ponder/templates/etherscan/ponder-env.d.ts b/packages/create-ponder/templates/etherscan/ponder-env.d.ts index f8e7347cf..03126bf92 100644 --- a/packages/create-ponder/templates/etherscan/ponder-env.d.ts +++ b/packages/create-ponder/templates/etherscan/ponder-env.d.ts @@ -21,6 +21,7 @@ declare module "@/generated" { schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = Virtual.IndexingFunctionArgs; export type Schema = Virtual.Schema; diff --git a/packages/create-ponder/templates/subgraph/package.json b/packages/create-ponder/templates/subgraph/package.json index 94e7e0c44..c69bab9ea 100644 --- 
a/packages/create-ponder/templates/subgraph/package.json +++ b/packages/create-ponder/templates/subgraph/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@ponder/core": "^0.0.95", + "hono": "^4.5.0", "viem": "^1.19.3" }, "devDependencies": { diff --git a/packages/create-ponder/templates/subgraph/ponder-env.d.ts b/packages/create-ponder/templates/subgraph/ponder-env.d.ts index 2f5363e0c..1169bd3f1 100644 --- a/packages/create-ponder/templates/subgraph/ponder-env.d.ts +++ b/packages/create-ponder/templates/subgraph/ponder-env.d.ts @@ -25,6 +25,7 @@ declare module "@/generated" { Schema, name >; + export type ApiContext = Virtual.Drizzle; export type IndexingFunctionArgs = { event: Event; context: Context; diff --git a/packages/create-ponder/tsup.config.ts b/packages/create-ponder/tsup.config.ts index e762cc512..5a18d583c 100644 --- a/packages/create-ponder/tsup.config.ts +++ b/packages/create-ponder/tsup.config.ts @@ -26,6 +26,7 @@ export default defineConfig({ path.join(examplesPath, "**", "*"), "!**/with-nextjs/**", "!**/with-foundry/**", + "!**/with-trpc/**", "!**/node_modules/**", "!**/generated/**", "!**/.ponder/**", diff --git a/patches/graphql@16.8.1.patch b/patches/graphql@16.8.1.patch deleted file mode 100644 index 730b48159..000000000 --- a/patches/graphql@16.8.1.patch +++ /dev/null @@ -1,3 +0,0 @@ -diff --git a/index.mjs b/index.mjs -deleted file mode 100644 -index ba8672e675b69675b8d37d3aa521ee2298eacdcc..0000000000000000000000000000000000000000 \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 108b1a298..624a83907 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,9 +8,6 @@ patchedDependencies: detect-package-manager@3.0.1: hash: tkhlb7gk5ij4zxchvtwu3teirq path: patches/detect-package-manager@3.0.1.patch - graphql@16.8.1: - hash: 3zvcnrptpojleshpmtp6be677a - path: patches/graphql@16.8.1.patch importers: @@ -25,6 +22,9 @@ importers: '@changesets/cli': specifier: ^2.26.2 version: 2.27.1 + hono: + specifier: 4.5.0 + 
version: 4.5.0 lint-staged: specifier: ^15.1.0 version: 15.2.0 @@ -141,14 +141,42 @@ importers: specifier: ^5.2.2 version: 5.3.3 + examples/feature-api-functions: + dependencies: + '@ponder/core': + specifier: workspace:* + version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 + viem: + specifier: ^1.19.9 + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) + devDependencies: + '@types/node': + specifier: ^20.10.0 + version: 20.11.24 + eslint: + specifier: ^8.54.0 + version: 8.56.0 + eslint-config-ponder: + specifier: workspace:* + version: link:../../packages/eslint-config-ponder + typescript: + specifier: ^5.3.2 + version: 5.3.3 + examples/feature-blocks: dependencies: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -168,9 +196,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -193,9 +224,12 @@ importers: abitype: specifier: ^0.10.2 version: 0.10.3(typescript@5.3.3) + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -215,9 +249,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -237,9 +274,12 @@ importers: '@ponder/core': specifier: workspace:* version: 
link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -259,9 +299,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -281,9 +324,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -303,9 +349,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -325,9 +374,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -347,9 +399,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -369,9 +424,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + 
version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -391,9 +449,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -413,9 +474,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.10.0 @@ -447,9 +511,12 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.3 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20.9.0 @@ -473,10 +540,10 @@ importers: version: 5.12.2(react@18.2.0) graphql: specifier: ^16.8.1 - version: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + version: 16.8.2 graphql-request: specifier: ^6.1.0 - version: 6.1.0(graphql@16.8.1) + version: 6.1.0(graphql@16.8.2) next: specifier: 14.0.3 version: 14.0.3(react-dom@18.2.0)(react@18.2.0) @@ -491,7 +558,7 @@ importers: version: 18.2.0(react@18.2.0) viem: specifier: ^1.19.11 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) devDependencies: '@types/node': specifier: ^20 @@ -526,9 +593,54 @@ importers: '@ponder/core': specifier: workspace:* version: link:../../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 + viem: + specifier: ^1.19.9 + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) + devDependencies: + '@types/node': + specifier: ^20.10.0 + version: 20.11.24 + eslint: + specifier: ^8.54.0 + version: 8.56.0 + eslint-config-ponder: + specifier: workspace:* + 
version: link:../../../packages/eslint-config-ponder + typescript: + specifier: ^5.3.2 + version: 5.3.3 + + examples/with-trpc: {} + + examples/with-trpc/client: + dependencies: + '@trpc/client': + specifier: ^10.45.2 + version: 10.45.2 + + examples/with-trpc/ponder: + dependencies: + '@hono/trpc-server': + specifier: ^0.3.2 + version: 0.3.2(@trpc/server@10.45.2)(hono@4.5.0) + '@ponder/core': + specifier: workspace:* + version: link:../../../packages/core + '@trpc/server': + specifier: ^10.45.2 + version: 10.45.2 + hono: + specifier: ^4.5.0 + version: 4.5.0 viem: specifier: ^1.19.9 - version: 1.21.4(typescript@5.3.3) + version: 1.21.4(typescript@5.3.3)(zod@3.23.8) + zod: + specifier: ^3.23.8 + version: 3.23.8 devDependencies: '@types/node': specifier: ^20.10.0 @@ -568,16 +680,16 @@ importers: version: 12.0.1(commander@12.0.0) '@escape.tech/graphql-armor-max-aliases': specifier: ^2.3.0 - version: 2.3.0 + version: 2.4.0 '@escape.tech/graphql-armor-max-depth': specifier: ^2.2.0 - version: 2.2.0 + version: 2.3.0 '@escape.tech/graphql-armor-max-tokens': specifier: ^2.3.0 - version: 2.3.0 + version: 2.4.0 '@hono/node-server': specifier: ^1.11.2 - version: 1.11.2 + version: 1.11.3 '@ponder/utils': specifier: workspace:* version: link:../utils @@ -602,6 +714,9 @@ importers: dotenv: specifier: ^16.3.1 version: 16.3.1 + drizzle-orm: + specifier: ^0.31.2 + version: 0.31.2(@types/better-sqlite3@7.6.10)(@types/pg@8.10.9)(@types/react@18.2.46)(better-sqlite3@11.1.2)(kysely@0.26.3)(pg@8.11.3)(react@18.2.0) emittery: specifier: ^1.0.1 version: 1.0.1 @@ -613,16 +728,10 @@ importers: version: 10.3.10 graphql: specifier: ^16.8.1 - version: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) - graphql-type-json: - specifier: ^0.3.2 - version: 0.3.2(graphql@16.8.1) + version: 16.8.2 graphql-yoga: specifier: ^5.3.0 - version: 5.3.0(graphql@16.8.1) - hono: - specifier: ^4.4.2 - version: 4.4.2 + version: 5.3.1(graphql@16.8.2) http-terminator: specifier: ^3.2.0 version: 3.2.0 @@ -632,9 +741,6 @@ 
importers: kysely: specifier: ^0.26.3 version: 0.26.3 - magic-string: - specifier: ^0.30.5 - version: 0.30.5 p-queue: specifier: ^7.4.1 version: 7.4.1 @@ -2419,16 +2525,6 @@ packages: '@jridgewell/trace-mapping': 0.3.9 dev: true - /@envelop/core@4.0.3: - resolution: {integrity: sha512-O0Vz8E0TObT6ijAob8jYFVJavcGywKThM3UAsxUIBBVPYZTMiqI9lo2gmAnbMUnrDcAYkUTZEW9FDYPRdF5l6g==} - engines: {node: '>=16.0.0'} - requiresBuild: true - dependencies: - '@envelop/types': 4.0.1 - tslib: 2.6.2 - dev: false - optional: true - /@envelop/core@5.0.1: resolution: {integrity: sha512-wxA8EyE1fPnlbP0nC/SFI7uU8wSNf4YjxZhAPu0P63QbgIvqHtHsH4L3/u+rsTruzhk3OvNRgQyLsMfaR9uzAQ==} engines: {node: '>=18.0.0'} @@ -2438,15 +2534,6 @@ packages: tslib: 2.6.2 dev: false - /@envelop/types@4.0.1: - resolution: {integrity: sha512-ULo27/doEsP7uUhm2iTnElx13qTO6I5FKvmLoX41cpfuw8x6e0NUFknoqhEsLzAbgz8xVS5mjwcxGCXh4lDYzg==} - engines: {node: '>=16.0.0'} - requiresBuild: true - dependencies: - tslib: 2.6.2 - dev: false - optional: true - /@envelop/types@5.0.0: resolution: {integrity: sha512-IPjmgSc4KpQRlO4qbEDnBEixvtb06WDmjKfi/7fkZaryh5HuOmTtixe1EupQI5XfXO8joc3d27uUZ0QdC++euA==} engines: {node: '>=18.0.0'} @@ -2837,41 +2924,41 @@ packages: requiresBuild: true optional: true - /@escape.tech/graphql-armor-max-aliases@2.3.0: - resolution: {integrity: sha512-h0AfPx929MWBnDlWnn/hcLHHNIAnUjws30OmyPLj9GqVmsBpj3338LELvORuuf3N1ciWI0xgkQd3NRSrmgr3ig==} - engines: {node: '>=16.0.0'} + /@escape.tech/graphql-armor-max-aliases@2.4.0: + resolution: {integrity: sha512-d4V9EgtPRG9HIoPHuanFNLHj1ENB1YkZi9FbiBiH88x5VahCjVpMXDgKQGkG6RUTOODU4XKp0/ZgaOq0pX5oEA==} + engines: {node: '>=18.0.0'} dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 optionalDependencies: '@envelop/core': 5.0.1 - '@escape.tech/graphql-armor-types': 0.5.0 + '@escape.tech/graphql-armor-types': 0.6.0 dev: false - /@escape.tech/graphql-armor-max-depth@2.2.0: - resolution: {integrity: 
sha512-v0z2yelQL614mYFpYL/iRkieq/7H2XKbvJ6RvbGMFFSqo3eSIz8fyX0f6pyswR7myQxki4ur0MFxSn8S5jjfqw==} - engines: {node: '>=16.0.0'} + /@escape.tech/graphql-armor-max-depth@2.3.0: + resolution: {integrity: sha512-EgqJU2yOaKaFeNDqMn18fIOI6UNjboWV950G9I39ebXyxsQmIaAx0Hs9hJoCBEHdLY9SCKWsEZFipHXqvaphdw==} + engines: {node: '>=18.0.0'} dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 optionalDependencies: - '@envelop/core': 4.0.3 - '@escape.tech/graphql-armor-types': 0.5.0 + '@envelop/core': 5.0.1 + '@escape.tech/graphql-armor-types': 0.6.0 dev: false - /@escape.tech/graphql-armor-max-tokens@2.3.0: - resolution: {integrity: sha512-4aqtUhT4ONUVWY6Z7crjPFyOK2/quUGHFU3G2+s4GYFxQHn3F5HjdI2KoY5ot2Sdijh4X+gx0ebBjUzriLNtbg==} - engines: {node: '>=16.0.0'} + /@escape.tech/graphql-armor-max-tokens@2.4.0: + resolution: {integrity: sha512-apKQBcYc6vsrITR+uKGXTC9yWV4zUEP4usb5zO0vebYT6e4KLcS2gwIQOsDLCnD5IyU5sUOzHBWmkFyBPz5keQ==} + engines: {node: '>=18.0.0'} dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 optionalDependencies: '@envelop/core': 5.0.1 - '@escape.tech/graphql-armor-types': 0.5.0 + '@escape.tech/graphql-armor-types': 0.6.0 dev: false - /@escape.tech/graphql-armor-types@0.5.0: - resolution: {integrity: sha512-a7KMhb1qVHFFWw4bvGYQI637YaIZRozbfc+Fj1Vv/pwnTCJOzOgnvKO8+WBXJsFFGJ2Kj+fRORmSpz7J+lJF1w==} + /@escape.tech/graphql-armor-types@0.6.0: + resolution: {integrity: sha512-Y3X6JgkB1N1MMaHNXaE2IeJWIs6wT4XcPvXM8PRWmT2DblZfY4NYiV1mh0GTInKdlnrEr5hquOR9XV+M3Da43w==} requiresBuild: true dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 dev: false optional: true @@ -3076,8 +3163,8 @@ packages: hasBin: true dependencies: '@rescript/std': 9.0.0 - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) - graphql-import-node: 0.0.5(graphql@16.8.1) + graphql: 16.8.2 + graphql-import-node: 0.0.5(graphql@16.8.2) js-yaml: 4.1.0 dev: true @@ -3132,63 +3219,63 @@ packages: 
assemblyscript: 0.19.10 dev: true - /@graphql-tools/executor@1.2.6(graphql@16.8.1): + /@graphql-tools/executor@1.2.6(graphql@16.8.2): resolution: {integrity: sha512-+1kjfqzM5T2R+dCw7F4vdJ3CqG+fY/LYJyhNiWEFtq0ToLwYzR/KKyD8YuzTirEjSxWTVlcBh7endkx5n5F6ew==} engines: {node: '>=16.0.0'} peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: - '@graphql-tools/utils': 10.2.0(graphql@16.8.1) - '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.1) + '@graphql-tools/utils': 10.2.0(graphql@16.8.2) + '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.2) '@repeaterjs/repeater': 3.0.5 - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 tslib: 2.6.2 value-or-promise: 1.0.12 dev: false - /@graphql-tools/merge@9.0.4(graphql@16.8.1): + /@graphql-tools/merge@9.0.4(graphql@16.8.2): resolution: {integrity: sha512-MivbDLUQ+4Q8G/Hp/9V72hbn810IJDEZQ57F01sHnlrrijyadibfVhaQfW/pNH+9T/l8ySZpaR/DpL5i+ruZ+g==} engines: {node: '>=16.0.0'} peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: - '@graphql-tools/utils': 10.2.0(graphql@16.8.1) - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + '@graphql-tools/utils': 10.2.0(graphql@16.8.2) + graphql: 16.8.2 tslib: 2.6.2 dev: false - /@graphql-tools/schema@10.0.3(graphql@16.8.1): + /@graphql-tools/schema@10.0.3(graphql@16.8.2): resolution: {integrity: sha512-p28Oh9EcOna6i0yLaCFOnkcBDQECVf3SCexT6ktb86QNj9idnkhI+tCxnwZDh58Qvjd2nURdkbevvoZkvxzCog==} engines: {node: '>=16.0.0'} peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: - '@graphql-tools/merge': 9.0.4(graphql@16.8.1) - '@graphql-tools/utils': 10.2.0(graphql@16.8.1) - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + '@graphql-tools/merge': 9.0.4(graphql@16.8.2) + '@graphql-tools/utils': 10.2.0(graphql@16.8.2) + graphql: 16.8.2 tslib: 2.6.2 value-or-promise: 1.0.12 dev: false - /@graphql-tools/utils@10.2.0(graphql@16.8.1): + 
/@graphql-tools/utils@10.2.0(graphql@16.8.2): resolution: {integrity: sha512-HYV7dO6pNA2nGKawygaBpk8y+vXOUjjzzO43W/Kb7EPRmXUEQKjHxPYRvQbiF72u1N3XxwGK5jnnFk9WVhUwYw==} engines: {node: '>=16.0.0'} peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: - '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.1) + '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.2) cross-inspect: 1.0.0 dset: 3.1.3 - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 tslib: 2.6.2 dev: false - /@graphql-typed-document-node/core@3.2.0(graphql@16.8.1): + /@graphql-typed-document-node/core@3.2.0(graphql@16.8.2): resolution: {integrity: sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==} peerDependencies: graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 dev: false /@graphql-yoga/logger@2.0.0: @@ -3228,14 +3315,26 @@ packages: react-dom: 18.2.0(react@18.2.0) dev: false - /@hono/node-server@1.11.2: - resolution: {integrity: sha512-JhX0nUC66GeDxpIdMKWDRMEwtQBa64CY907iAF1sYqb4m2p2PdSU7zkbnNhAZLg/6IjSlTuj6CF307JlBXVvpg==} + /@hono/node-server@1.11.3: + resolution: {integrity: sha512-mFg3qlKkDtMWSalX5Gyh6Zd3MXay0biGobFlyJ49i6R1smBBS1CYkNZbvwLlw+4sSrHO4ZiH7kj4TcLpl2Jr3g==} engines: {node: '>=18.14.1'} dev: false + /@hono/trpc-server@0.3.2(@trpc/server@10.45.2)(hono@4.5.0): + resolution: {integrity: sha512-dTKDrSldjBn0hi9FjHOGdiHgMCWPoW5NaBUTQRNLyVL9JcJJu9oiwTFoIscPFxc9CF2rAeA8aRGHoFJK+A8cLw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@trpc/server': ^10.10.0 || >11.0.0-rc + hono: '>=4.*' + dependencies: + '@trpc/server': 10.45.2 + hono: 4.5.0 + dev: false + /@humanwhocodes/config-array@0.11.13: resolution: {integrity: sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==} engines: {node: 
'>=10.10.0'} + deprecated: Use @eslint/config-array instead dependencies: '@humanwhocodes/object-schema': 2.0.1 debug: 4.3.4(supports-color@8.1.1) @@ -3251,6 +3350,7 @@ packages: /@humanwhocodes/object-schema@2.0.1: resolution: {integrity: sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==} + deprecated: Use @eslint/object-schema instead dev: true /@ipld/dag-cbor@7.0.3: @@ -4207,6 +4307,16 @@ packages: unist-util-visit: 5.0.0 dev: false + /@trpc/client@10.45.2: + resolution: {integrity: sha512-ykALM5kYWTLn1zYuUOZ2cPWlVfrXhc18HzBDyRhoPYN0jey4iQHEFSEowfnhg1RvYnrAVjNBgHNeSAXjrDbGwg==} + peerDependencies: + '@trpc/server': 10.45.2 + dev: false + + /@trpc/server@10.45.2: + resolution: {integrity: sha512-wOrSThNNE4HUnuhJG6PfDRp4L2009KDVxsd+2VYH8ro6o/7/jwYZ8Uu5j+VaW+mOmc8EHerHzGcdbGNQSAUPgg==} + dev: false + /@trysound/sax@0.2.0: resolution: {integrity: sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==} engines: {node: '>=10.13.0'} @@ -4242,7 +4352,6 @@ packages: resolution: {integrity: sha512-TZBjD+yOsyrUJGmcUj6OS3JADk3+UZcNv3NOBqGkM09bZdi28fNZw8ODqbMOLfKCu7RYCO62/ldq1iHbzxqoPw==} dependencies: '@types/node': 20.11.24 - dev: true /@types/bn.js@5.1.5: resolution: {integrity: sha512-V46N0zwKRF5Q00AZ6hWtN0T8gGmDUaUzLWQvHFo5yThtVwK/VCenFY3wXVbOvNfajEpsTfQM4IN9k/d6gUVX3A==} @@ -4416,7 +4525,6 @@ packages: '@types/node': 20.11.24 pg-protocol: 1.6.0 pg-types: 4.0.1 - dev: true /@types/prompts@2.4.9: resolution: {integrity: sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==} @@ -4765,7 +4873,7 @@ packages: zod: 3.22.4 dev: true - /abitype@0.9.8(typescript@5.3.3): + /abitype@0.9.8(typescript@5.3.3)(zod@3.23.8): resolution: {integrity: sha512-puLifILdm+8sjyss4S+fsUN09obiT1g2YW6CtcQF+QDzxR0euzgEB29MZujC6zMk2a6SVmtttq1fc6+YFA7WYQ==} peerDependencies: typescript: '>=5.0.4' @@ -4777,6 +4885,7 @@ packages: optional: true dependencies: typescript: 5.3.3 
+ zod: 3.23.8 /abitype@1.0.0(typescript@5.0.4): resolution: {integrity: sha512-NMeMah//6bJ56H5XRj8QCV4AwuW6hB6zqz2LnhhLdcWVQOsXki6/Pn3APeqxCma62nXIcmZWdu1DlHWS74umVQ==} @@ -4839,9 +4948,6 @@ packages: /ajv-formats@2.1.1: resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} - peerDependenciesMeta: - ajv: - optional: true dependencies: ajv: 8.12.0 dev: false @@ -6803,6 +6909,98 @@ packages: engines: {node: '>=10'} dev: true + /drizzle-orm@0.31.2(@types/better-sqlite3@7.6.10)(@types/pg@8.10.9)(@types/react@18.2.46)(better-sqlite3@11.1.2)(kysely@0.26.3)(pg@8.11.3)(react@18.2.0): + resolution: {integrity: sha512-QnenevbnnAzmbNzQwbhklvIYrDE8YER8K7kSrAWQSV1YvFCdSQPzj+jzqRdTSsV2cDqSpQ0NXGyL1G9I43LDLg==} + peerDependencies: + '@aws-sdk/client-rds-data': '>=3' + '@cloudflare/workers-types': '>=3' + '@electric-sql/pglite': '>=0.1.1' + '@libsql/client': '*' + '@neondatabase/serverless': '>=0.1' + '@op-engineering/op-sqlite': '>=2' + '@opentelemetry/api': ^1.4.1 + '@planetscale/database': '>=1' + '@tidbcloud/serverless': '*' + '@types/better-sqlite3': '*' + '@types/pg': '*' + '@types/react': '>=18' + '@types/sql.js': '*' + '@vercel/postgres': '>=0.8.0' + '@xata.io/client': '*' + better-sqlite3: '>=7' + bun-types: '*' + expo-sqlite: '>=13.2.0' + knex: '*' + kysely: '*' + mysql2: '>=2' + pg: '>=8' + postgres: '>=3' + react: '>=18' + sql.js: '>=1' + sqlite3: '>=5' + peerDependenciesMeta: + '@aws-sdk/client-rds-data': + optional: true + '@cloudflare/workers-types': + optional: true + '@electric-sql/pglite': + optional: true + '@libsql/client': + optional: true + '@neondatabase/serverless': + optional: true + '@op-engineering/op-sqlite': + optional: true + '@opentelemetry/api': + optional: true + '@planetscale/database': + optional: true + '@tidbcloud/serverless': + optional: true + '@types/better-sqlite3': + optional: true + '@types/pg': + optional: true + '@types/react': + optional: true + '@types/sql.js': + 
optional: true + '@vercel/postgres': + optional: true + '@xata.io/client': + optional: true + better-sqlite3: + optional: true + bun-types: + optional: true + expo-sqlite: + optional: true + knex: + optional: true + kysely: + optional: true + mysql2: + optional: true + pg: + optional: true + postgres: + optional: true + react: + optional: true + sql.js: + optional: true + sqlite3: + optional: true + dependencies: + '@types/better-sqlite3': 7.6.10 + '@types/pg': 8.10.9 + '@types/react': 18.2.46 + better-sqlite3: 11.1.2 + kysely: 0.26.3 + pg: 8.11.3 + react: 18.2.0 + dev: false + /dset@3.1.3: resolution: {integrity: sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ==} engines: {node: '>=4'} @@ -8210,50 +8408,42 @@ packages: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} dev: true - /graphql-import-node@0.0.5(graphql@16.8.1): + /graphql-import-node@0.0.5(graphql@16.8.2): resolution: {integrity: sha512-OXbou9fqh9/Lm7vwXT0XoRN9J5+WCYKnbiTalgFDvkQERITRmcfncZs6aVABedd5B85yQU5EULS4a5pnbpuI0Q==} peerDependencies: graphql: '*' dependencies: - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 dev: true - /graphql-request@6.1.0(graphql@16.8.1): + /graphql-request@6.1.0(graphql@16.8.2): resolution: {integrity: sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw==} peerDependencies: graphql: 14 - 16 dependencies: - '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.1) + '@graphql-typed-document-node/core': 3.2.0(graphql@16.8.2) cross-fetch: 3.1.8 - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 transitivePeerDependencies: - encoding dev: false - /graphql-type-json@0.3.2(graphql@16.8.1): - resolution: {integrity: sha512-J+vjof74oMlCWXSvt0DOf2APEdZOCdubEvGDUAlqH//VBYcOYsGgRW7Xzorr44LvkjiuvecWc8fChxuZZbChtg==} - peerDependencies: - graphql: '>=0.8.0' - dependencies: - 
graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) - dev: false - - /graphql-yoga@5.3.0(graphql@16.8.1): - resolution: {integrity: sha512-6mXoGE5AMN6YNugJvjBFIQ0dFUiOMloN0dAzLL8GFt4iJ5WlWRgjdzGHod2zZz7yWQokEVD42DHgrc7NY3Dm0w==} + /graphql-yoga@5.3.1(graphql@16.8.2): + resolution: {integrity: sha512-n918QV6TF7xTjb9ASnozgsr4ydMc08c+x4eRAWKxxWVwSnzdP2xeN2zw1ljIzRD0ccSCNoBajGDKwcZkJDitPA==} engines: {node: '>=18.0.0'} peerDependencies: graphql: ^15.2.0 || ^16.0.0 dependencies: '@envelop/core': 5.0.1 - '@graphql-tools/executor': 1.2.6(graphql@16.8.1) - '@graphql-tools/schema': 10.0.3(graphql@16.8.1) - '@graphql-tools/utils': 10.2.0(graphql@16.8.1) + '@graphql-tools/executor': 1.2.6(graphql@16.8.2) + '@graphql-tools/schema': 10.0.3(graphql@16.8.2) + '@graphql-tools/utils': 10.2.0(graphql@16.8.2) '@graphql-yoga/logger': 2.0.0 '@graphql-yoga/subscription': 5.0.0 '@whatwg-node/fetch': 0.9.17 '@whatwg-node/server': 0.9.34 dset: 3.1.3 - graphql: 16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a) + graphql: 16.8.2 lru-cache: 10.1.0 tslib: 2.6.2 dev: false @@ -8263,10 +8453,9 @@ packages: engines: {node: '>= 10.x'} dev: true - /graphql@16.8.1(patch_hash=3zvcnrptpojleshpmtp6be677a): - resolution: {integrity: sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==} + /graphql@16.8.2: + resolution: {integrity: sha512-cvVIBILwuoSyD54U4cF/UXDh5yAobhNV/tPygI4lZhgOIJQE/WLWC4waBRb4I6bDVYb3OVx3lfHbaQOEoUD5sg==} engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} - patched: true /gray-matter@4.0.3: resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} @@ -8392,7 +8581,7 @@ packages: devlop: 1.1.0 hast-util-from-parse5: 8.0.1 parse5: 7.1.2 - vfile: 6.0.1 + vfile: 6.0.2 vfile-message: 4.0.2 dev: false @@ -8404,7 +8593,7 @@ packages: devlop: 1.1.0 hastscript: 8.0.0 property-information: 6.5.0 - vfile: 6.0.1 + vfile: 6.0.2 vfile-location: 5.0.2 web-namespaces: 2.0.1 dev: false 
@@ -8421,8 +8610,8 @@ packages: '@types/hast': 3.0.4 dev: false - /hast-util-raw@9.0.3: - resolution: {integrity: sha512-ICWvVOF2fq4+7CMmtCPD5CM4QKjPbHpPotE6+8tDooV0ZuyJVUzHsrNX+O5NaRbieTf0F7FfeBOMAwi6Td0+yQ==} + /hast-util-raw@9.0.4: + resolution: {integrity: sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA==} dependencies: '@types/hast': 3.0.4 '@types/unist': 3.0.2 @@ -8434,7 +8623,7 @@ packages: parse5: 7.1.2 unist-util-position: 5.0.0 unist-util-visit: 5.0.0 - vfile: 6.0.1 + vfile: 6.0.2 web-namespaces: 2.0.1 zwitch: 2.0.4 dev: false @@ -8491,7 +8680,7 @@ packages: '@types/unist': 3.0.2 ccount: 2.0.1 comma-separated-tokens: 2.0.3 - hast-util-raw: 9.0.3 + hast-util-raw: 9.0.4 hast-util-whitespace: 3.0.0 html-void-elements: 3.0.0 mdast-util-to-hast: 13.2.0 @@ -8561,10 +8750,9 @@ packages: minimalistic-crypto-utils: 1.0.1 dev: true - /hono@4.4.2: - resolution: {integrity: sha512-bRhZ+BM9r04lRN2i9wiZ18yQNbZxHsmmRIItoAb43nRkHnIDsFhFh4mJ0seEs06FvenibpAgSVNHQ8ZzcDQx2A==} + /hono@4.5.0: + resolution: {integrity: sha512-ZbezypZfn4odyApjCCv+Fw5OgweBqRLA/EsMyc4FUknFvBJcBIKhHy4sqmD1rWpBc/3wUlaQ6tqOPjk36R1ckg==} engines: {node: '>=16.0.0'} - dev: false /hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} @@ -9826,6 +10014,7 @@ packages: engines: {node: '>=12'} dependencies: '@jridgewell/sourcemap-codec': 1.4.15 + dev: true /make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} @@ -10139,7 +10328,7 @@ packages: trim-lines: 3.0.1 unist-util-position: 5.0.0 unist-util-visit: 5.0.0 - vfile: 6.0.1 + vfile: 6.0.2 dev: false /mdast-util-to-markdown@1.5.0: @@ -11323,7 +11512,6 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - dev: true /on-exit-leak-free@2.1.2: 
resolution: {integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==} @@ -11725,7 +11913,6 @@ packages: /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - dev: true /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -11760,7 +11947,6 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 - dev: true /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -11942,7 +12128,6 @@ packages: /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} - dev: true /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} @@ -11954,7 +12139,6 @@ packages: engines: {node: '>= 6'} dependencies: obuf: 1.1.2 - dev: true /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} @@ -11964,7 +12148,6 @@ packages: /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} - dev: true /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} @@ -11976,11 +12159,9 @@ packages: /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - dev: true /postgres-range@1.1.3: resolution: {integrity: 
sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - dev: true /posthog-node@4.0.0: resolution: {integrity: sha512-jEZnNbgb/3FNk+gNwtTcyz3j+62zIN+UTPotONfacVXJnoI70KScSkKdIR+rvP9tA2kjBSoHQxGwJuizs27o9A==} @@ -12446,7 +12627,7 @@ packages: hast-util-to-text: 4.0.0 katex: 0.16.9 unist-util-visit-parents: 6.0.1 - vfile: 6.0.1 + vfile: 6.0.2 dev: false /rehype-pretty-code@0.10.1(shikiji@0.6.10): @@ -12465,8 +12646,8 @@ packages: resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==} dependencies: '@types/hast': 3.0.4 - hast-util-raw: 9.0.3 - vfile: 6.0.1 + hast-util-raw: 9.0.4 + vfile: 6.0.2 dev: false /remark-frontmatter@4.0.1: @@ -14072,7 +14253,7 @@ packages: extend: 3.0.2 is-plain-obj: 4.1.0 trough: 2.1.0 - vfile: 6.0.1 + vfile: 6.0.2 dev: false /unist-util-find-after@5.0.0: @@ -14321,7 +14502,7 @@ packages: resolution: {integrity: sha512-NXPYyxyBSH7zB5U6+3uDdd6Nybz6o6/od9rk8bp9H8GR3L+cm/fC0uUTbqBmUTnMCUDslAGBOIKNfvvb+gGlDg==} dependencies: '@types/unist': 3.0.2 - vfile: 6.0.1 + vfile: 6.0.2 dev: false /vfile-message@3.1.4: @@ -14347,8 +14528,8 @@ packages: vfile-message: 3.1.4 dev: false - /vfile@6.0.1: - resolution: {integrity: sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==} + /vfile@6.0.2: + resolution: {integrity: sha512-zND7NlS8rJYb/sPqkb13ZvbbUoExdbi4w3SfRrMq6R3FvnLQmmfpajJNITuuYm6AZ5uao9vy4BAos3EXBPf2rg==} dependencies: '@types/unist': 3.0.2 unist-util-stringify-position: 4.0.0 @@ -14391,7 +14572,7 @@ packages: '@noble/hashes': 1.3.2 '@scure/bip32': 1.3.2 '@scure/bip39': 1.2.1 - abitype: 0.9.8(typescript@5.3.3) + abitype: 0.9.8(typescript@5.3.3)(zod@3.23.8) isows: 1.0.3(ws@8.13.0) typescript: 5.3.3 ws: 8.13.0 @@ -14424,7 +14605,7 @@ packages: - zod dev: true - /viem@1.21.4(typescript@5.3.3): + /viem@1.21.4(typescript@5.3.3)(zod@3.23.8): resolution: {integrity: 
sha512-BNVYdSaUjeS2zKQgPs+49e5JKocfo60Ib2yiXOWBT6LuVxY1I/6fFX3waEtpXvL1Xn4qu+BVitVtMh9lyThyhQ==} peerDependencies: typescript: '>=5.0.4' @@ -14437,7 +14618,7 @@ packages: '@noble/hashes': 1.3.2 '@scure/bip32': 1.3.2 '@scure/bip39': 1.2.1 - abitype: 0.9.8(typescript@5.3.3) + abitype: 0.9.8(typescript@5.3.3)(zod@3.23.8) isows: 1.0.3(ws@8.13.0) typescript: 5.3.3 ws: 8.13.0 @@ -14993,6 +15174,9 @@ packages: /zod@3.22.4: resolution: {integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==} + /zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + /zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} dev: false