diff --git a/.changeset/config.json b/.changeset/config.json
index 39874eb3c..b6606880a 100644
--- a/.changeset/config.json
+++ b/.changeset/config.json
@@ -8,6 +8,7 @@
"ponder-examples-feature-blocks",
"ponder-examples-feature-factory",
"ponder-examples-feature-filter",
+ "ponder-examples-feature-api-functions",
"ponder-examples-feature-multichain",
"ponder-examples-feature-call-traces",
"ponder-examples-feature-proxy",
@@ -22,7 +23,10 @@
"ponder-examples-with-foundry-ponder",
"ponder-examples-with-foundry-foundry",
"ponder-examples-with-nextjs",
- "ponder-examples-with-nextjs-ponder"
+ "ponder-examples-with-nextjs-ponder",
+ "ponder-examples-with-trpc",
+ "ponder-examples-with-trpc-ponder",
+ "ponder-examples-with-trpc-client"
],
"linked": [],
"access": "public",
diff --git a/.changeset/shy-donuts-battle.md b/.changeset/shy-donuts-battle.md
new file mode 100644
index 000000000..4cbc9f011
--- /dev/null
+++ b/.changeset/shy-donuts-battle.md
@@ -0,0 +1,6 @@
+---
+"create-ponder": minor
+"@ponder/core": minor
+---
+
+Introduced API functions. [Read more](https://ponder.sh/docs/query/api-functions). Please read the [migration guide](https://ponder.sh/docs/migration-guide) for more information.
diff --git a/docs/pages/docs/_meta.ts b/docs/pages/docs/_meta.ts
index 5a7a0975b..a59037dab 100644
--- a/docs/pages/docs/_meta.ts
+++ b/docs/pages/docs/_meta.ts
@@ -30,11 +30,11 @@ export default {
},
indexing: { display: "children", title: "Indexing" },
- "-- Query the database": {
+ "-- Query": {
type: "separator",
- title: "Query the database",
+ title: "Query",
},
- query: { display: "children", title: "Query the database" },
+ query: { display: "children", title: "Query" },
"-- Production": {
type: "separator",
diff --git a/docs/pages/docs/indexing/create-update-records.mdx b/docs/pages/docs/indexing/create-update-records.mdx
index 8585da6fb..6876c8132 100644
--- a/docs/pages/docs/indexing/create-update-records.mdx
+++ b/docs/pages/docs/indexing/create-update-records.mdx
@@ -1,11 +1,11 @@
---
-title: "Create and Update Records"
+title: "Create and update records"
description: "Learn how to create and update records in the Ponder database."
---
# Create & update records
-Ponder's store API is inspired by the [Prisma Client API](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#model-queries). The store supports the following methods.
+The **Store API** is inspired by [Prisma](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#model-queries) and supports the following methods.
- [`create`](#create)
- [`update`](#update)
diff --git a/docs/pages/docs/migration-guide.mdx b/docs/pages/docs/migration-guide.mdx
index e5ad2e3ea..297d35a38 100644
--- a/docs/pages/docs/migration-guide.mdx
+++ b/docs/pages/docs/migration-guide.mdx
@@ -3,12 +3,38 @@ title: "Migration Guide"
description: "A guide for migrating to new versions of Ponder."
---
-import { FileTree, Steps } from "nextra/components";
-import { Callout } from "nextra/components";
+import { FileTree, Steps, Tabs, Callout } from "nextra/components";
import Architecture from "../../public/architecture.svg";
# Migration guide
+## 0.5.0
+
+**Breaking:** This release adds [Hono](https://hono.dev) as a peer dependency. After upgrading, install `hono` in your project.
+
+{/* prettier-ignore */}
+<Tabs items={["pnpm", "yarn", "npm"]}>
+<Tabs.Tab>
+```bash filename="shell"
+pnpm add hono@latest
+```
+</Tabs.Tab>
+<Tabs.Tab>
+```bash filename="shell"
+yarn add hono@latest
+```
+</Tabs.Tab>
+<Tabs.Tab>
+```bash filename="shell"
+npm install hono@latest
+```
+</Tabs.Tab>
+</Tabs>
+
+### Introduced API functions
+
+This release added support for API functions. [Read more](/docs/query/api-functions).
+
## 0.4.0
This release changes the location of database tables when using both SQLite and Postgres.
@@ -17,11 +43,11 @@ It **does not** require any changes to your application code, and does not bust
Please read the new docs on [direct SQL](/docs/query/direct-sql) for a detailed overview.
-#### SQLite
+### SQLite
Ponder now uses the `.ponder/sqlite/public.db` file for indexed tables. Before, the tables were present as views in the `.ponder/sqlite/ponder.db`. Now, the`.ponder/sqlite/ponder.db` file is only used internally by Ponder.
-#### Postgres
+### Postgres
Ponder now creates a table in the `public` schema for each table in `ponder.schema.ts`. Before, Ponder created them as views in the `ponder` schema.
@@ -31,7 +57,7 @@ This also changes the zero-downtime behavior on platforms like Railway. For more
- [Direct SQL](/docs/query/direct-sql)
- [Zero-downtime deployments](/docs/production/zero-downtime)
-#### Postgres table cleanup
+### Postgres table cleanup
After upgrading to `0.4.x`, you can run the following Postgres SQL script to clean up stale tables and views created by `0.3.x` Ponder apps.
diff --git a/docs/pages/docs/query/_meta.ts b/docs/pages/docs/query/_meta.ts
index 19f84ab05..49e84a20c 100644
--- a/docs/pages/docs/query/_meta.ts
+++ b/docs/pages/docs/query/_meta.ts
@@ -1,4 +1,5 @@
export default {
+ "api-functions": "API functions",
"graphql": "GraphQL",
"direct-sql": "Direct SQL",
};
diff --git a/docs/pages/docs/query/api-functions.mdx b/docs/pages/docs/query/api-functions.mdx
new file mode 100644
index 000000000..f18d13679
--- /dev/null
+++ b/docs/pages/docs/query/api-functions.mdx
@@ -0,0 +1,254 @@
+---
+title: "API functions"
+description: "Use API functions to customize the API layer of your app."
+---
+
+import { Callout, Steps } from "nextra/components";
+
+# API functions
+
+**API functions** are user-defined TypeScript functions that handle web requests. You can use them to customize the API layer of your app with complex SQL queries, authentication, data from external sources, and more.
+
+API functions are built on top of [Hono](https://hono.dev/), a fast and lightweight routing framework.
+
+## Example projects
+
+These example apps demonstrate how to use API functions.
+
+- [**Basic**](https://github.com/ponder-sh/ponder/tree/main/examples/feature-api-functions/src/api/index.ts) - An ERC20 app that responds to `GET` requests and uses the [Select API](#query-the-database) to build custom SQL queries.
+- [**tRPC**](https://github.com/ponder-sh/ponder/tree/main/examples/with-trpc/client/index.ts) - An app that creates a [tRPC](https://trpc.io) server and a script that uses a tRPC client with end-to-end type safety.
+
+## Get started
+
+
+
+### Upgrade to `>=0.5.0`
+
+API functions are available starting from version `0.5.0`. Read the [migration guide](/docs/migration-guide#050) for more details.
+
+### Create `src/api/index.ts` file
+
+To enable API functions, create a file named `src/api/index.ts` with the following code. You can register API functions in any `.ts` file in the `src/api/` directory.
+
+```ts filename="src/api/index.ts"
+import { ponder } from "@/generated";
+
+ponder.get("/hello", (c) => {
+ return c.text("Hello, world!");
+});
+```
+
+### Send a request
+
+Visit `http://localhost:42069/hello` in your browser to see the response.
+
+```plaintext filename="Response"
+Hello, world!
+```
+
+### Register GraphQL middleware
+
+<Callout type="warning">
+  Once you create an API function file, you have "opted in" to API functions and
+  your app **will not** serve the standard GraphQL API by default.
+</Callout>
+
+To continue using the standard GraphQL API, register the `graphql` middleware exported from `@ponder/core`.
+
+```ts filename="src/api/index.ts" {2,4-5}
+import { ponder } from "@/generated";
+import { graphql } from "@ponder/core";
+
+ponder.use("/", graphql());
+ponder.use("/graphql", graphql());
+
+// ...
+```
+
+
+
+## Query the database
+
+API functions can query the database using the read-only **Select API**, a type-safe query builder powered by [Drizzle](https://orm.drizzle.team/docs/overview). The Select API supports complex filters, joins, aggregations, set operations, and more.
+
+<Callout type="info">
+  The Select API is only available within API functions. Indexing functions use
+  the [Store API](/docs/indexing/create-update-records) (`findUnique`, `upsert`,
+  etc) which supports writes and is reorg-aware.
+</Callout>
+
+### Select
+
+The API function context contains a built-in database client (`db`) and an object for each table in your schema (`tables`). These objects are type-safe – changes to your `ponder.schema.ts` file will be reflected immediately.
+
+To build a query, use `c.db.select(){:ts}` and include a table object using `.from(c.tables.TableName){:ts}`.
+
+
+
+```ts filename="ponder.schema.ts" {4}
+import { createSchema } from "@ponder/core";
+
+export default createSchema((p) => ({
+ Account: p.createTable({
+ id: p.string(),
+ balance: p.bigint(),
+ }),
+}));
+```
+
+```ts filename="src/api/index.ts" {6}
+import { ponder } from "@/generated";
+
+ponder.get("/account/:address", async (c) => {
+ const address = c.req.param("address");
+
+  const account = await c.db.select().from(c.tables.Account).limit(1);
+
+ return c.json(account);
+});
+```
+
+
+
+To build more complex queries, use `join`, `groupBy`, `where`, `orderBy`, `limit`, and other methods. Drizzle's filter & conditional operators (like `eq`, `gte`, and `or`) are re-exported by `@ponder/core`.
+
+For more details, please reference the [Drizzle documentation](https://orm.drizzle.team/docs/select).
+
+```ts filename="src/api/index.ts" {2,7-11}
+import { ponder } from "@/generated";
+import { gte } from "@ponder/core";
+
+ponder.get("/whales", async (c) => {
+ const { Account } = c.tables;
+
+ const whales = await c.db
+ .select({ address: Account.id, balance: Account.balance })
+    .from(Account)
+    .where(gte(Account.balance, 1_000_000_000n))
+ .limit(1);
+
+ return c.json(whales);
+});
+```
+
+### Execute
+
+To run raw SQL queries, use `db.execute(...){:ts}` with the `sql` utility function. [Read more](https://orm.drizzle.team/docs/sql) about the `sql` function.
+
+```ts filename="src/api/index.ts" {2,7-9}
+import { ponder } from "@/generated";
+import { sql } from "@ponder/core";
+
+ponder.get("/:token/ticker", async (c) => {
+ const token = c.req.param("token");
+
+ const result = await c.db.execute(
+ sql`SELECT ticker FROM "Token" WHERE id = ${token}`
+ );
+ const ticker = result.rows[0]?.ticker;
+
+ return c.text(ticker);
+});
+```
+
+## API reference
+
+### `get()`
+
+Use `ponder.get()` to handle HTTP `GET` requests. The `c` context object contains the request, response helpers, and the database connection.
+
+```ts filename="src/api/index.ts" {2,4,6}
+import { ponder } from "@/generated";
+import { eq } from "@ponder/core";
+
+ponder.get("/account/:address", async (c) => {
+  const { Account } = c.tables;
+  const address = c.req.param("address");
+
+  const [account] = await c.db
+    .select()
+    .from(Account)
+    .where(eq(Account.id, address))
+    .limit(1);
+
+  if (account) {
+    return c.json(account);
+  }
+  return c.json({ error: "Account not found" }, 404);
+});
+```
+
+### `post()`
+
+<Callout type="warning">
+  API functions cannot write to the database, even when handling `POST`
+  requests.
+</Callout>
+
+Use `ponder.post()` to handle HTTP `POST` requests.
+
+In this example, we calculate the volume of transfers for each recipient within a given time range. The `fromTimestamp` and `toTimestamp` parameters are passed in the request body.
+
+```ts filename="src/api/index.ts" {5,7-9}
+import { ponder } from "@/generated";
+import { and, gte, lte, sum } from "@ponder/core";
+
+ponder.post("/volume", async (c) => {
+ const { TransferEvent } = c.tables;
+
+ const body = await c.req.json();
+ const { fromTimestamp, toTimestamp } = body;
+
+  const volumeChartData = await c.db
+    .select({
+      to: TransferEvent.toId,
+      volume: sum(TransferEvent.amount),
+    })
+    .from(TransferEvent)
+    .where(
+      and(
+        gte(TransferEvent.timestamp, fromTimestamp),
+        lte(TransferEvent.timestamp, toTimestamp)
+      )
+    )
+    .groupBy(TransferEvent.toId)
+    .limit(1);
+
+ return c.json(volumeChartData);
+});
+```
+
+### `use()`
+
+Use `ponder.use(...){:ts}` to add middleware to your API functions. Middleware functions can modify the request and response objects, add logs, authenticate requests, and more. [Read more](https://hono.dev/docs/guides/middleware) about Hono middleware.
+
+```ts filename="src/api/index.ts" {3}
+import { ponder } from "@/generated";
+
+ponder.use(async (c, next) => {
+  console.log("Request received:", c.req.url);
+  await next();
+});
+```
+
+### `hono`
+
+Use `ponder.hono` to access the underlying Hono instance.
+
+```ts filename="src/api/index.ts" {3}
+import { ponder } from "@/generated";
+
+ponder.hono.notFound((c) => {
+ return c.text("Custom 404 Message", 404);
+});
+
+// ...
+```
+
+## Reserved routes
+
+If you register API functions that conflict with these internal routes, the build will fail.
+
+- `/health`: Returns a `200` status code after the app has completed historical indexing OR the healthcheck timeout has expired, whichever comes first. [Read more](/docs/production/zero-downtime) about healthchecks.
+- `/metrics`: Returns Prometheus metrics. [Read more](/docs/advanced/metrics) about metrics.
+- `/status`: Returns indexing status object. [Read more](/docs/advanced/status) about indexing status.
diff --git a/examples/feature-api-functions/.env.example b/examples/feature-api-functions/.env.example
new file mode 100644
index 000000000..f7745c21c
--- /dev/null
+++ b/examples/feature-api-functions/.env.example
@@ -0,0 +1,5 @@
+# Mainnet RPC URL used for fetching blockchain data. Alchemy is recommended.
+PONDER_RPC_URL_1=https://eth-mainnet.g.alchemy.com/v2/...
+
+# (Optional) Postgres database URL. If not provided, SQLite will be used.
+DATABASE_URL=
\ No newline at end of file
diff --git a/examples/feature-api-functions/.eslintrc.json b/examples/feature-api-functions/.eslintrc.json
new file mode 100644
index 000000000..359e2bbfa
--- /dev/null
+++ b/examples/feature-api-functions/.eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "ponder"
+}
diff --git a/examples/feature-api-functions/.gitignore b/examples/feature-api-functions/.gitignore
new file mode 100644
index 000000000..f0c7e1177
--- /dev/null
+++ b/examples/feature-api-functions/.gitignore
@@ -0,0 +1,18 @@
+# Dependencies
+/node_modules
+
+# Debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.pnpm-debug.log*
+
+# Misc
+.DS_Store
+
+# Env files
+.env*.local
+
+# Ponder
+/generated/
+/.ponder/
diff --git a/examples/feature-api-functions/abis/erc20ABI.ts b/examples/feature-api-functions/abis/erc20ABI.ts
new file mode 100644
index 000000000..94cbc6a33
--- /dev/null
+++ b/examples/feature-api-functions/abis/erc20ABI.ts
@@ -0,0 +1,147 @@
+export const erc20ABI = [
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "DOMAIN_SEPARATOR",
+ outputs: [{ name: "", internalType: "bytes32", type: "bytes32" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [
+ { name: "", internalType: "address", type: "address" },
+ { name: "", internalType: "address", type: "address" },
+ ],
+ name: "allowance",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "spender", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "approve",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [{ name: "", internalType: "address", type: "address" }],
+ name: "balanceOf",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "decimals",
+ outputs: [{ name: "", internalType: "uint8", type: "uint8" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "name",
+ outputs: [{ name: "", internalType: "string", type: "string" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [{ name: "", internalType: "address", type: "address" }],
+ name: "nonces",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "owner", internalType: "address", type: "address" },
+ { name: "spender", internalType: "address", type: "address" },
+ { name: "value", internalType: "uint256", type: "uint256" },
+ { name: "deadline", internalType: "uint256", type: "uint256" },
+ { name: "v", internalType: "uint8", type: "uint8" },
+ { name: "r", internalType: "bytes32", type: "bytes32" },
+ { name: "s", internalType: "bytes32", type: "bytes32" },
+ ],
+ name: "permit",
+ outputs: [],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "symbol",
+ outputs: [{ name: "", internalType: "string", type: "string" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "totalSupply",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "to", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "transfer",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "from", internalType: "address", type: "address" },
+ { name: "to", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "transferFrom",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ type: "event",
+ anonymous: false,
+ inputs: [
+ {
+ name: "owner",
+ internalType: "address",
+ type: "address",
+ indexed: true,
+ },
+ {
+ name: "spender",
+ internalType: "address",
+ type: "address",
+ indexed: true,
+ },
+ {
+ name: "amount",
+ internalType: "uint256",
+ type: "uint256",
+ indexed: false,
+ },
+ ],
+ name: "Approval",
+ },
+ {
+ type: "event",
+ anonymous: false,
+ inputs: [
+ { name: "from", internalType: "address", type: "address", indexed: true },
+ { name: "to", internalType: "address", type: "address", indexed: true },
+ {
+ name: "amount",
+ internalType: "uint256",
+ type: "uint256",
+ indexed: false,
+ },
+ ],
+ name: "Transfer",
+ },
+] as const;
diff --git a/examples/feature-api-functions/package.json b/examples/feature-api-functions/package.json
new file mode 100644
index 000000000..9578a0a34
--- /dev/null
+++ b/examples/feature-api-functions/package.json
@@ -0,0 +1,27 @@
+{
+ "name": "ponder-examples-feature-api-functions",
+ "private": true,
+ "type": "module",
+ "scripts": {
+ "dev": "ponder dev",
+ "start": "ponder start",
+ "codegen": "ponder codegen",
+ "serve": "ponder serve",
+ "lint": "eslint .",
+ "typecheck": "tsc"
+ },
+ "dependencies": {
+ "@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
+ "viem": "^1.19.9"
+ },
+ "devDependencies": {
+ "@types/node": "^20.10.0",
+ "eslint": "^8.54.0",
+ "eslint-config-ponder": "workspace:*",
+ "typescript": "^5.3.2"
+ },
+ "engines": {
+ "node": ">=18.14"
+ }
+}
diff --git a/examples/feature-api-functions/ponder-env.d.ts b/examples/feature-api-functions/ponder-env.d.ts
new file mode 100644
index 000000000..03126bf92
--- /dev/null
+++ b/examples/feature-api-functions/ponder-env.d.ts
@@ -0,0 +1,28 @@
+// This file enables type checking and editor autocomplete for this Ponder project.
+// After upgrading, you may find that changes have been made to this file.
+// If this happens, please commit the changes. Do not manually edit this file.
+// See https://ponder.sh/docs/getting-started/installation#typescript for more information.
+
+declare module "@/generated" {
+ import type { Virtual } from "@ponder/core";
+
+ type config = typeof import("./ponder.config.ts").default;
+ type schema = typeof import("./ponder.schema.ts").default;
+
+ export const ponder: Virtual.Registry;
+
+ export type EventNames = Virtual.EventNames;
+ export type Event = Virtual.Event<
+ config,
+ name
+ >;
+ export type Context = Virtual.Context<
+ config,
+ schema,
+ name
+ >;
+ export type ApiContext = Virtual.Drizzle;
+ export type IndexingFunctionArgs =
+ Virtual.IndexingFunctionArgs;
+ export type Schema = Virtual.Schema;
+}
diff --git a/examples/feature-api-functions/ponder.config.ts b/examples/feature-api-functions/ponder.config.ts
new file mode 100644
index 000000000..32ae5a2ba
--- /dev/null
+++ b/examples/feature-api-functions/ponder.config.ts
@@ -0,0 +1,21 @@
+import { createConfig } from "@ponder/core";
+import { http } from "viem";
+import { erc20ABI } from "./abis/erc20ABI";
+
+export default createConfig({
+ networks: {
+ mainnet: {
+ chainId: 1,
+ transport: http(process.env.PONDER_RPC_URL_1),
+ },
+ },
+ contracts: {
+ ERC20: {
+ network: "mainnet",
+ abi: erc20ABI,
+ address: "0x32353A6C91143bfd6C7d363B546e62a9A2489A20",
+ startBlock: 13142655,
+ endBlock: 13150000,
+ },
+ },
+});
diff --git a/examples/feature-api-functions/ponder.schema.ts b/examples/feature-api-functions/ponder.schema.ts
new file mode 100644
index 000000000..a722500c6
--- /dev/null
+++ b/examples/feature-api-functions/ponder.schema.ts
@@ -0,0 +1,50 @@
+import { createSchema } from "@ponder/core";
+
+export default createSchema((p) => ({
+ Account: p.createTable({
+ id: p.hex(),
+ balance: p.bigint(),
+ isOwner: p.boolean(),
+
+ allowances: p.many("Allowance.ownerId"),
+ approvalOwnerEvents: p.many("ApprovalEvent.ownerId"),
+ approvalSpenderEvents: p.many("ApprovalEvent.spenderId"),
+ transferFromEvents: p.many("TransferEvent.fromId"),
+ transferToEvents: p.many("TransferEvent.toId"),
+ }),
+ Allowance: p.createTable({
+ id: p.string(),
+ amount: p.bigint(),
+
+ ownerId: p.hex().references("Account.id"),
+ spenderId: p.hex().references("Account.id"),
+
+ owner: p.one("ownerId"),
+ spender: p.one("spenderId"),
+ }),
+ TransferEvent: p.createTable(
+ {
+ id: p.string(),
+ amount: p.bigint(),
+ timestamp: p.int(),
+
+ fromId: p.hex().references("Account.id"),
+ toId: p.hex().references("Account.id"),
+
+ from: p.one("fromId"),
+ to: p.one("toId"),
+ },
+ { fromIdIndex: p.index("fromId") },
+ ),
+ ApprovalEvent: p.createTable({
+ id: p.string(),
+ amount: p.bigint(),
+ timestamp: p.int(),
+
+ ownerId: p.hex().references("Account.id"),
+ spenderId: p.hex().references("Account.id"),
+
+ owner: p.one("ownerId"),
+ spender: p.one("spenderId"),
+ }),
+}));
diff --git a/examples/feature-api-functions/src/api/index.ts b/examples/feature-api-functions/src/api/index.ts
new file mode 100644
index 000000000..219dbde3b
--- /dev/null
+++ b/examples/feature-api-functions/src/api/index.ts
@@ -0,0 +1,47 @@
+import { ponder } from "@/generated";
+import { count, desc, eq, graphql, or, replaceBigInts } from "@ponder/core";
+import { formatEther, getAddress } from "viem";
+
+ponder.use("/graphql", graphql());
+
+ponder.get("/count", async (c) => {
+ const result = await c.db
+ .select({ count: count() })
+ .from(c.tables.TransferEvent);
+
+ if (result.length === 0) return c.text("0");
+ return c.text(String(result[0]!.count));
+});
+
+ponder.get("/count/:address", async (c) => {
+ const account = getAddress(c.req.param("address"));
+ const { TransferEvent } = c.tables;
+
+ const result = await c.db
+ .select({ count: count() })
+ .from(c.tables.TransferEvent)
+ .where(
+ or(eq(TransferEvent.fromId, account), eq(TransferEvent.toId, account)),
+ );
+
+ if (result.length === 0) return c.text("0");
+ return c.text(String(result[0]!.count));
+});
+
+ponder.get("/whale-transfers", async (c) => {
+ const { TransferEvent, Account } = c.tables;
+
+ // Top 10 transfers from whale accounts
+ const result = await c.db
+ .select({
+ amount: TransferEvent.amount,
+ senderBalance: Account.balance,
+ })
+ .from(TransferEvent)
+ .innerJoin(Account, eq(TransferEvent.fromId, Account.id))
+ .orderBy(desc(Account.balance))
+ .limit(10);
+
+ if (result.length === 0) return c.text("Not found", 500);
+ return c.json(replaceBigInts(result, (b) => formatEther(b)));
+});
diff --git a/examples/feature-api-functions/src/index.ts b/examples/feature-api-functions/src/index.ts
new file mode 100644
index 000000000..16bf33aa7
--- /dev/null
+++ b/examples/feature-api-functions/src/index.ts
@@ -0,0 +1,70 @@
+import { ponder } from "@/generated";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+ const { Account, TransferEvent } = context.db;
+
+ // Create an Account for the sender, or update the balance if it already exists.
+ await Account.upsert({
+ id: event.args.from,
+ create: {
+ balance: BigInt(0),
+ isOwner: false,
+ },
+ update: ({ current }) => ({
+ balance: current.balance - event.args.amount,
+ }),
+ });
+
+ // Create an Account for the recipient, or update the balance if it already exists.
+ await Account.upsert({
+ id: event.args.to,
+ create: {
+ balance: event.args.amount,
+ isOwner: false,
+ },
+ update: ({ current }) => ({
+ balance: current.balance + event.args.amount,
+ }),
+ });
+
+ // Create a TransferEvent.
+ await TransferEvent.create({
+ id: event.log.id,
+ data: {
+ fromId: event.args.from,
+ toId: event.args.to,
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ },
+ });
+});
+
+ponder.on("ERC20:Approval", async ({ event, context }) => {
+ const { Allowance, ApprovalEvent } = context.db;
+
+ const allowanceId = `${event.args.owner}-${event.args.spender}`;
+
+ // Create or update the Allowance.
+ await Allowance.upsert({
+ id: allowanceId,
+ create: {
+ ownerId: event.args.owner,
+ spenderId: event.args.spender,
+ amount: event.args.amount,
+ },
+ update: {
+ amount: event.args.amount,
+ },
+ });
+
+ // Create an ApprovalEvent.
+ await ApprovalEvent.create({
+ id: event.log.id,
+ data: {
+ ownerId: event.args.owner,
+ spenderId: event.args.spender,
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ },
+ });
+});
diff --git a/examples/feature-api-functions/tsconfig.json b/examples/feature-api-functions/tsconfig.json
new file mode 100644
index 000000000..592b9a939
--- /dev/null
+++ b/examples/feature-api-functions/tsconfig.json
@@ -0,0 +1,26 @@
+{
+ "compilerOptions": {
+ // Type checking
+ "strict": true,
+ "noUncheckedIndexedAccess": true,
+
+ // Interop constraints
+ "verbatimModuleSyntax": false,
+ "esModuleInterop": true,
+ "isolatedModules": true,
+ "allowSyntheticDefaultImports": true,
+ "resolveJsonModule": true,
+
+ // Language and environment
+ "moduleResolution": "bundler",
+ "module": "ESNext",
+ "noEmit": true,
+ "lib": ["ES2022"],
+ "target": "ES2022",
+
+ // Skip type checking for node modules
+ "skipLibCheck": true
+ },
+ "include": ["./**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/examples/feature-blocks/package.json b/examples/feature-blocks/package.json
index 30ac311bb..0ee3320c7 100644
--- a/examples/feature-blocks/package.json
+++ b/examples/feature-blocks/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-blocks/ponder-env.d.ts b/examples/feature-blocks/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-blocks/ponder-env.d.ts
+++ b/examples/feature-blocks/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-call-traces/package.json b/examples/feature-call-traces/package.json
index 8cb804b97..47495ec9a 100644
--- a/examples/feature-call-traces/package.json
+++ b/examples/feature-call-traces/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-call-traces/ponder-env.d.ts b/examples/feature-call-traces/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-call-traces/ponder-env.d.ts
+++ b/examples/feature-call-traces/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-factory/package.json b/examples/feature-factory/package.json
index 910df7367..40a0a0f2d 100644
--- a/examples/feature-factory/package.json
+++ b/examples/feature-factory/package.json
@@ -13,6 +13,7 @@
"dependencies": {
"@ponder/core": "workspace:*",
"abitype": "^0.10.2",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-factory/ponder-env.d.ts b/examples/feature-factory/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-factory/ponder-env.d.ts
+++ b/examples/feature-factory/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-filter/package.json b/examples/feature-filter/package.json
index 2f06346e6..7d67fddaf 100644
--- a/examples/feature-filter/package.json
+++ b/examples/feature-filter/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-filter/ponder-env.d.ts b/examples/feature-filter/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-filter/ponder-env.d.ts
+++ b/examples/feature-filter/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-multichain/package.json b/examples/feature-multichain/package.json
index 0ca701d42..242ffd29d 100644
--- a/examples/feature-multichain/package.json
+++ b/examples/feature-multichain/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-multichain/ponder-env.d.ts b/examples/feature-multichain/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-multichain/ponder-env.d.ts
+++ b/examples/feature-multichain/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-proxy/package.json b/examples/feature-proxy/package.json
index 01f8f305f..679e8bf32 100644
--- a/examples/feature-proxy/package.json
+++ b/examples/feature-proxy/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-proxy/ponder-env.d.ts b/examples/feature-proxy/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-proxy/ponder-env.d.ts
+++ b/examples/feature-proxy/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/examples/feature-read-contract/package.json b/examples/feature-read-contract/package.json
index 6a833a937..6107ee15f 100644
--- a/examples/feature-read-contract/package.json
+++ b/examples/feature-read-contract/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/feature-read-contract/ponder-env.d.ts b/examples/feature-read-contract/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/feature-read-contract/ponder-env.d.ts
+++ b/examples/feature-read-contract/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/project-friendtech/package.json b/examples/project-friendtech/package.json
index 90cb0c488..de1ac3979 100644
--- a/examples/project-friendtech/package.json
+++ b/examples/project-friendtech/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/project-friendtech/ponder-env.d.ts b/examples/project-friendtech/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/project-friendtech/ponder-env.d.ts
+++ b/examples/project-friendtech/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/project-uniswap-v3-flash/package.json b/examples/project-uniswap-v3-flash/package.json
index 01e060539..c6c8696b7 100644
--- a/examples/project-uniswap-v3-flash/package.json
+++ b/examples/project-uniswap-v3-flash/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/project-uniswap-v3-flash/ponder-env.d.ts b/examples/project-uniswap-v3-flash/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/project-uniswap-v3-flash/ponder-env.d.ts
+++ b/examples/project-uniswap-v3-flash/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/reference-erc1155/package.json b/examples/reference-erc1155/package.json
index fbb297929..e72e4e9a8 100644
--- a/examples/reference-erc1155/package.json
+++ b/examples/reference-erc1155/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/reference-erc1155/ponder-env.d.ts b/examples/reference-erc1155/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/reference-erc1155/ponder-env.d.ts
+++ b/examples/reference-erc1155/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/reference-erc20/package.json b/examples/reference-erc20/package.json
index 2398e90c5..dcf42502f 100644
--- a/examples/reference-erc20/package.json
+++ b/examples/reference-erc20/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/reference-erc20/ponder-env.d.ts b/examples/reference-erc20/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/reference-erc20/ponder-env.d.ts
+++ b/examples/reference-erc20/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/reference-erc4626/package.json b/examples/reference-erc4626/package.json
index b504c9b72..9c7a2ffeb 100644
--- a/examples/reference-erc4626/package.json
+++ b/examples/reference-erc4626/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/reference-erc4626/ponder-env.d.ts b/examples/reference-erc4626/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/reference-erc4626/ponder-env.d.ts
+++ b/examples/reference-erc4626/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/reference-erc721/package.json b/examples/reference-erc721/package.json
index 879b2ab64..edd6b2abe 100644
--- a/examples/reference-erc721/package.json
+++ b/examples/reference-erc721/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/reference-erc721/ponder-env.d.ts b/examples/reference-erc721/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/reference-erc721/ponder-env.d.ts
+++ b/examples/reference-erc721/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/with-foundry/ponder/package.json b/examples/with-foundry/ponder/package.json
index 5811c4c65..1f57f0646 100644
--- a/examples/with-foundry/ponder/package.json
+++ b/examples/with-foundry/ponder/package.json
@@ -11,6 +11,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.3"
},
"devDependencies": {
diff --git a/examples/with-foundry/ponder/ponder-env.d.ts b/examples/with-foundry/ponder/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/with-foundry/ponder/ponder-env.d.ts
+++ b/examples/with-foundry/ponder/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/with-nextjs/ponder/package.json b/examples/with-nextjs/ponder/package.json
index 83a129df0..fd47071e8 100644
--- a/examples/with-nextjs/ponder/package.json
+++ b/examples/with-nextjs/ponder/package.json
@@ -11,6 +11,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "hono": "^4.5.0",
"viem": "^1.19.9"
},
"devDependencies": {
diff --git a/examples/with-nextjs/ponder/ponder-env.d.ts b/examples/with-nextjs/ponder/ponder-env.d.ts
index f8e7347cf..03126bf92 100644
--- a/examples/with-nextjs/ponder/ponder-env.d.ts
+++ b/examples/with-nextjs/ponder/ponder-env.d.ts
@@ -21,6 +21,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle<schema>;
 export type IndexingFunctionArgs<name extends EventNames = EventNames> =
 Virtual.IndexingFunctionArgs<config, name>;
 export type Schema = Virtual.Schema<schema>;
diff --git a/examples/with-trpc/client/env.d.ts b/examples/with-trpc/client/env.d.ts
new file mode 100644
index 000000000..74e33e49d
--- /dev/null
+++ b/examples/with-trpc/client/env.d.ts
@@ -0,0 +1 @@
+///
diff --git a/examples/with-trpc/client/index.ts b/examples/with-trpc/client/index.ts
new file mode 100644
index 000000000..df33d8e8e
--- /dev/null
+++ b/examples/with-trpc/client/index.ts
@@ -0,0 +1,17 @@
+import { createTRPCProxyClient, httpBatchLink } from "@trpc/client";
+import type { AppRouter } from "../ponder/src/api/index";
+
+const client = createTRPCProxyClient<AppRouter>({
+ links: [
+ httpBatchLink({
+ url: "http://localhost:42069/trpc",
+ }),
+ ],
+});
+
+const response = await client.hello.query(
+ // ^?
+ "0xC1894e6a52c4C7Ac5b2e0b25583Ea48bf45DA14a",
+);
+
+console.log(response);
diff --git a/examples/with-trpc/client/package.json b/examples/with-trpc/client/package.json
new file mode 100644
index 000000000..02f06cc8f
--- /dev/null
+++ b/examples/with-trpc/client/package.json
@@ -0,0 +1,12 @@
+{
+ "name": "ponder-examples-with-trpc-client",
+ "private": true,
+ "type": "module",
+ "scripts": {},
+ "dependencies": {
+ "@trpc/client": "^10.45.2"
+ },
+ "engines": {
+ "node": ">=18.14"
+ }
+}
diff --git a/examples/with-trpc/client/tsconfig.json b/examples/with-trpc/client/tsconfig.json
new file mode 100644
index 000000000..592b9a939
--- /dev/null
+++ b/examples/with-trpc/client/tsconfig.json
@@ -0,0 +1,26 @@
+{
+ "compilerOptions": {
+ // Type checking
+ "strict": true,
+ "noUncheckedIndexedAccess": true,
+
+ // Interop constraints
+ "verbatimModuleSyntax": false,
+ "esModuleInterop": true,
+ "isolatedModules": true,
+ "allowSyntheticDefaultImports": true,
+ "resolveJsonModule": true,
+
+ // Language and environment
+ "moduleResolution": "bundler",
+ "module": "ESNext",
+ "noEmit": true,
+ "lib": ["ES2022"],
+ "target": "ES2022",
+
+ // Skip type checking for node modules
+ "skipLibCheck": true
+ },
+ "include": ["./**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/examples/with-trpc/package.json b/examples/with-trpc/package.json
new file mode 100644
index 000000000..4873fe900
--- /dev/null
+++ b/examples/with-trpc/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "ponder-examples-with-trpc",
+ "private": true,
+ "type": "module",
+ "engines": {
+ "node": ">=18.14"
+ }
+}
diff --git a/examples/with-trpc/ponder/.env.example b/examples/with-trpc/ponder/.env.example
new file mode 100644
index 000000000..f7745c21c
--- /dev/null
+++ b/examples/with-trpc/ponder/.env.example
@@ -0,0 +1,5 @@
+# Mainnet RPC URL used for fetching blockchain data. Alchemy is recommended.
+PONDER_RPC_URL_1=https://eth-mainnet.g.alchemy.com/v2/...
+
+# (Optional) Postgres database URL. If not provided, SQLite will be used.
+DATABASE_URL=
\ No newline at end of file
diff --git a/examples/with-trpc/ponder/.eslintrc.json b/examples/with-trpc/ponder/.eslintrc.json
new file mode 100644
index 000000000..359e2bbfa
--- /dev/null
+++ b/examples/with-trpc/ponder/.eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "ponder"
+}
diff --git a/examples/with-trpc/ponder/.gitignore b/examples/with-trpc/ponder/.gitignore
new file mode 100644
index 000000000..f0c7e1177
--- /dev/null
+++ b/examples/with-trpc/ponder/.gitignore
@@ -0,0 +1,18 @@
+# Dependencies
+/node_modules
+
+# Debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.pnpm-debug.log*
+
+# Misc
+.DS_Store
+
+# Env files
+.env*.local
+
+# Ponder
+/generated/
+/.ponder/
diff --git a/examples/with-trpc/ponder/abis/erc20ABI.ts b/examples/with-trpc/ponder/abis/erc20ABI.ts
new file mode 100644
index 000000000..94cbc6a33
--- /dev/null
+++ b/examples/with-trpc/ponder/abis/erc20ABI.ts
@@ -0,0 +1,147 @@
+export const erc20ABI = [
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "DOMAIN_SEPARATOR",
+ outputs: [{ name: "", internalType: "bytes32", type: "bytes32" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [
+ { name: "", internalType: "address", type: "address" },
+ { name: "", internalType: "address", type: "address" },
+ ],
+ name: "allowance",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "spender", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "approve",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [{ name: "", internalType: "address", type: "address" }],
+ name: "balanceOf",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "decimals",
+ outputs: [{ name: "", internalType: "uint8", type: "uint8" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "name",
+ outputs: [{ name: "", internalType: "string", type: "string" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [{ name: "", internalType: "address", type: "address" }],
+ name: "nonces",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "owner", internalType: "address", type: "address" },
+ { name: "spender", internalType: "address", type: "address" },
+ { name: "value", internalType: "uint256", type: "uint256" },
+ { name: "deadline", internalType: "uint256", type: "uint256" },
+ { name: "v", internalType: "uint8", type: "uint8" },
+ { name: "r", internalType: "bytes32", type: "bytes32" },
+ { name: "s", internalType: "bytes32", type: "bytes32" },
+ ],
+ name: "permit",
+ outputs: [],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "symbol",
+ outputs: [{ name: "", internalType: "string", type: "string" }],
+ },
+ {
+ stateMutability: "view",
+ type: "function",
+ inputs: [],
+ name: "totalSupply",
+ outputs: [{ name: "", internalType: "uint256", type: "uint256" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "to", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "transfer",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ stateMutability: "nonpayable",
+ type: "function",
+ inputs: [
+ { name: "from", internalType: "address", type: "address" },
+ { name: "to", internalType: "address", type: "address" },
+ { name: "amount", internalType: "uint256", type: "uint256" },
+ ],
+ name: "transferFrom",
+ outputs: [{ name: "", internalType: "bool", type: "bool" }],
+ },
+ {
+ type: "event",
+ anonymous: false,
+ inputs: [
+ {
+ name: "owner",
+ internalType: "address",
+ type: "address",
+ indexed: true,
+ },
+ {
+ name: "spender",
+ internalType: "address",
+ type: "address",
+ indexed: true,
+ },
+ {
+ name: "amount",
+ internalType: "uint256",
+ type: "uint256",
+ indexed: false,
+ },
+ ],
+ name: "Approval",
+ },
+ {
+ type: "event",
+ anonymous: false,
+ inputs: [
+ { name: "from", internalType: "address", type: "address", indexed: true },
+ { name: "to", internalType: "address", type: "address", indexed: true },
+ {
+ name: "amount",
+ internalType: "uint256",
+ type: "uint256",
+ indexed: false,
+ },
+ ],
+ name: "Transfer",
+ },
+] as const;
diff --git a/examples/with-trpc/ponder/package.json b/examples/with-trpc/ponder/package.json
new file mode 100644
index 000000000..e0f3aa491
--- /dev/null
+++ b/examples/with-trpc/ponder/package.json
@@ -0,0 +1,29 @@
+{
+ "private": true,
+ "name": "ponder-examples-with-trpc-ponder",
+ "type": "module",
+ "scripts": {
+ "dev": "ponder dev",
+ "start": "ponder start",
+ "codegen": "ponder codegen",
+ "lint": "eslint .",
+ "typecheck": "tsc"
+ },
+ "dependencies": {
+ "@hono/trpc-server": "^0.3.2",
+ "@ponder/core": "workspace:*",
+ "@trpc/server": "^10.45.2",
+ "hono": "^4.5.0",
+ "viem": "^1.19.9",
+ "zod": "^3.23.8"
+ },
+ "devDependencies": {
+ "@types/node": "^20.10.0",
+ "eslint": "^8.54.0",
+ "eslint-config-ponder": "workspace:*",
+ "typescript": "^5.3.2"
+ },
+ "engines": {
+ "node": ">=18.14"
+ }
+}
diff --git a/examples/with-trpc/ponder/ponder-env.d.ts b/examples/with-trpc/ponder/ponder-env.d.ts
new file mode 100644
index 000000000..03126bf92
--- /dev/null
+++ b/examples/with-trpc/ponder/ponder-env.d.ts
@@ -0,0 +1,28 @@
+// This file enables type checking and editor autocomplete for this Ponder project.
+// After upgrading, you may find that changes have been made to this file.
+// If this happens, please commit the changes. Do not manually edit this file.
+// See https://ponder.sh/docs/getting-started/installation#typescript for more information.
+
+declare module "@/generated" {
+ import type { Virtual } from "@ponder/core";
+
+ type config = typeof import("./ponder.config.ts").default;
+ type schema = typeof import("./ponder.schema.ts").default;
+
+ export const ponder: Virtual.Registry<config, schema>;
+
+ export type EventNames = Virtual.EventNames<config>;
+ export type Event<name extends EventNames = EventNames> = Virtual.Event<
+ config,
+ name
+ >;
+ export type Context<name extends EventNames = EventNames> = Virtual.Context<
+ config,
+ schema,
+ name
+ >;
+ export type ApiContext = Virtual.Drizzle<schema>;
+ export type IndexingFunctionArgs<name extends EventNames = EventNames> =
+ Virtual.IndexingFunctionArgs<config, name>;
+ export type Schema = Virtual.Schema<schema>;
+}
diff --git a/examples/with-trpc/ponder/ponder.config.ts b/examples/with-trpc/ponder/ponder.config.ts
new file mode 100644
index 000000000..32ae5a2ba
--- /dev/null
+++ b/examples/with-trpc/ponder/ponder.config.ts
@@ -0,0 +1,21 @@
+import { createConfig } from "@ponder/core";
+import { http } from "viem";
+import { erc20ABI } from "./abis/erc20ABI";
+
+export default createConfig({
+ networks: {
+ mainnet: {
+ chainId: 1,
+ transport: http(process.env.PONDER_RPC_URL_1),
+ },
+ },
+ contracts: {
+ ERC20: {
+ network: "mainnet",
+ abi: erc20ABI,
+ address: "0x32353A6C91143bfd6C7d363B546e62a9A2489A20",
+ startBlock: 13142655,
+ endBlock: 13150000,
+ },
+ },
+});
diff --git a/examples/with-trpc/ponder/ponder.schema.ts b/examples/with-trpc/ponder/ponder.schema.ts
new file mode 100644
index 000000000..a722500c6
--- /dev/null
+++ b/examples/with-trpc/ponder/ponder.schema.ts
@@ -0,0 +1,50 @@
+import { createSchema } from "@ponder/core";
+
+export default createSchema((p) => ({
+ Account: p.createTable({
+ id: p.hex(),
+ balance: p.bigint(),
+ isOwner: p.boolean(),
+
+ allowances: p.many("Allowance.ownerId"),
+ approvalOwnerEvents: p.many("ApprovalEvent.ownerId"),
+ approvalSpenderEvents: p.many("ApprovalEvent.spenderId"),
+ transferFromEvents: p.many("TransferEvent.fromId"),
+ transferToEvents: p.many("TransferEvent.toId"),
+ }),
+ Allowance: p.createTable({
+ id: p.string(),
+ amount: p.bigint(),
+
+ ownerId: p.hex().references("Account.id"),
+ spenderId: p.hex().references("Account.id"),
+
+ owner: p.one("ownerId"),
+ spender: p.one("spenderId"),
+ }),
+ TransferEvent: p.createTable(
+ {
+ id: p.string(),
+ amount: p.bigint(),
+ timestamp: p.int(),
+
+ fromId: p.hex().references("Account.id"),
+ toId: p.hex().references("Account.id"),
+
+ from: p.one("fromId"),
+ to: p.one("toId"),
+ },
+ { fromIdIndex: p.index("fromId") },
+ ),
+ ApprovalEvent: p.createTable({
+ id: p.string(),
+ amount: p.bigint(),
+ timestamp: p.int(),
+
+ ownerId: p.hex().references("Account.id"),
+ spenderId: p.hex().references("Account.id"),
+
+ owner: p.one("ownerId"),
+ spender: p.one("spenderId"),
+ }),
+}));
diff --git a/examples/with-trpc/ponder/src/api/index.ts b/examples/with-trpc/ponder/src/api/index.ts
new file mode 100644
index 000000000..709808d87
--- /dev/null
+++ b/examples/with-trpc/ponder/src/api/index.ts
@@ -0,0 +1,33 @@
+import { type ApiContext, ponder } from "@/generated";
+import { trpcServer } from "@hono/trpc-server";
+import { eq } from "@ponder/core";
+import { initTRPC } from "@trpc/server";
+import type { Address } from "viem";
+import { z } from "zod";
+
+const t = initTRPC.context<ApiContext>().create();
+
+const appRouter = t.router({
+ hello: t.procedure.input(z.string()).query(async ({ input, ctx }) => {
+ const { Account } = ctx.tables;
+
+ const account = await ctx.db
+ .select({ balance: Account.balance })
+ .from(Account)
+ .where(eq(Account.id, input as Address))
+ .limit(1);
+
+ if (account.length === 0) return null;
+ return account[0]!.balance.toString();
+ }),
+});
+
+export type AppRouter = typeof appRouter;
+
+ponder.use(
+ "/trpc/*",
+ trpcServer({
+ router: appRouter,
+ createContext: (_, c) => c.var,
+ }),
+);
diff --git a/examples/with-trpc/ponder/src/index.ts b/examples/with-trpc/ponder/src/index.ts
new file mode 100644
index 000000000..16bf33aa7
--- /dev/null
+++ b/examples/with-trpc/ponder/src/index.ts
@@ -0,0 +1,70 @@
+import { ponder } from "@/generated";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+ const { Account, TransferEvent } = context.db;
+
+ // Create an Account for the sender, or update the balance if it already exists.
+ await Account.upsert({
+ id: event.args.from,
+ create: {
+ balance: BigInt(0),
+ isOwner: false,
+ },
+ update: ({ current }) => ({
+ balance: current.balance - event.args.amount,
+ }),
+ });
+
+ // Create an Account for the recipient, or update the balance if it already exists.
+ await Account.upsert({
+ id: event.args.to,
+ create: {
+ balance: event.args.amount,
+ isOwner: false,
+ },
+ update: ({ current }) => ({
+ balance: current.balance + event.args.amount,
+ }),
+ });
+
+ // Create a TransferEvent.
+ await TransferEvent.create({
+ id: event.log.id,
+ data: {
+ fromId: event.args.from,
+ toId: event.args.to,
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ },
+ });
+});
+
+ponder.on("ERC20:Approval", async ({ event, context }) => {
+ const { Allowance, ApprovalEvent } = context.db;
+
+ const allowanceId = `${event.args.owner}-${event.args.spender}`;
+
+ // Create or update the Allowance.
+ await Allowance.upsert({
+ id: allowanceId,
+ create: {
+ ownerId: event.args.owner,
+ spenderId: event.args.spender,
+ amount: event.args.amount,
+ },
+ update: {
+ amount: event.args.amount,
+ },
+ });
+
+ // Create an ApprovalEvent.
+ await ApprovalEvent.create({
+ id: event.log.id,
+ data: {
+ ownerId: event.args.owner,
+ spenderId: event.args.spender,
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ },
+ });
+});
diff --git a/examples/with-trpc/ponder/tsconfig.json b/examples/with-trpc/ponder/tsconfig.json
new file mode 100644
index 000000000..592b9a939
--- /dev/null
+++ b/examples/with-trpc/ponder/tsconfig.json
@@ -0,0 +1,26 @@
+{
+ "compilerOptions": {
+ // Type checking
+ "strict": true,
+ "noUncheckedIndexedAccess": true,
+
+ // Interop constraints
+ "verbatimModuleSyntax": false,
+ "esModuleInterop": true,
+ "isolatedModules": true,
+ "allowSyntheticDefaultImports": true,
+ "resolveJsonModule": true,
+
+ // Language and environment
+ "moduleResolution": "bundler",
+ "module": "ESNext",
+ "noEmit": true,
+ "lib": ["ES2022"],
+ "target": "ES2022",
+
+ // Skip type checking for node modules
+ "skipLibCheck": true
+ },
+ "include": ["./**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/package.json b/package.json
index 3e1447e3f..daa7d404a 100644
--- a/package.json
+++ b/package.json
@@ -17,13 +17,17 @@
"@biomejs/biome": "^1.8.1",
"@changesets/changelog-github": "^0.4.8",
"@changesets/cli": "^2.26.2",
+ "hono": "4.5.0",
"lint-staged": "^15.1.0",
"simple-git-hooks": "^2.9.0",
"typescript": "5.0.4",
"viem": "1.16.0"
},
"lint-staged": {
- "*.ts": ["biome format --no-errors-on-unmatched --write", "biome check"],
+ "*.ts": [
+ "biome format --no-errors-on-unmatched --write",
+ "biome check --no-errors-on-unmatched"
+ ],
"!(*.ts)": ["biome format --no-errors-on-unmatched --write"]
},
"simple-git-hooks": {
@@ -32,7 +36,6 @@
"packageManager": "pnpm@8.6.10",
"pnpm": {
"patchedDependencies": {
- "graphql@16.8.1": "patches/graphql@16.8.1.patch",
"detect-package-manager@3.0.1": "patches/detect-package-manager@3.0.1.patch"
},
"peerDependencyRules": {
diff --git a/packages/core/package.json b/packages/core/package.json
index 7799620c9..7ee14e36b 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -33,6 +33,7 @@
"typecheck": "tsc --noEmit"
},
"peerDependencies": {
+ "hono": ">=4.5",
"typescript": ">=5.0.4",
"viem": ">=1.16"
},
@@ -56,17 +57,15 @@
"dataloader": "^2.2.2",
"detect-package-manager": "^3.0.1",
"dotenv": "^16.3.1",
+ "drizzle-orm": "^0.31.2",
"emittery": "^1.0.1",
"ethereum-bloom-filters": "^1.0.10",
"glob": "^10.3.10",
"graphql": "^16.8.1",
- "graphql-type-json": "^0.3.2",
"graphql-yoga": "^5.3.0",
- "hono": "^4.4.2",
"http-terminator": "^3.2.0",
"ink": "^4.4.1",
"kysely": "^0.26.3",
- "magic-string": "^0.30.5",
"p-queue": "^7.4.1",
"pg": "^8.11.3",
"pg-connection-string": "^2.6.2",
diff --git a/packages/core/src/_test/e2e/erc20/src/api/index.ts b/packages/core/src/_test/e2e/erc20/src/api/index.ts
new file mode 100644
index 000000000..5cf910996
--- /dev/null
+++ b/packages/core/src/_test/e2e/erc20/src/api/index.ts
@@ -0,0 +1,11 @@
+// @ts-ignore
+import { ponder } from "@/generated";
+import { graphql } from "@/index.js";
+
+// biome-ignore lint/suspicious/noRedeclare: :)
+declare const ponder: import("@/index.js").Virtual.Registry<
+ typeof import("../../ponder.config.js").default,
+ typeof import("../../ponder.schema.js").default
+>;
+
+ponder.use("/graphql", graphql());
diff --git a/packages/core/src/_test/e2e/factory/src/api/index.ts b/packages/core/src/_test/e2e/factory/src/api/index.ts
new file mode 100644
index 000000000..5cf910996
--- /dev/null
+++ b/packages/core/src/_test/e2e/factory/src/api/index.ts
@@ -0,0 +1,11 @@
+// @ts-ignore
+import { ponder } from "@/generated";
+import { graphql } from "@/index.js";
+
+// biome-ignore lint/suspicious/noRedeclare: :)
+declare const ponder: import("@/index.js").Virtual.Registry<
+ typeof import("../../ponder.config.js").default,
+ typeof import("../../ponder.schema.js").default
+>;
+
+ponder.use("/graphql", graphql());
diff --git a/packages/core/src/_test/utils.ts b/packages/core/src/_test/utils.ts
index 3d5b2a053..bcfdff32b 100644
--- a/packages/core/src/_test/utils.ts
+++ b/packages/core/src/_test/utils.ts
@@ -696,8 +696,10 @@ export async function waitForIndexedBlock(
const interval = setInterval(async () => {
const response = await fetch(`http://localhost:${port}/status`);
if (response.status === 200) {
- const status = (await response.json()) as Status;
- const statusBlockNumber = status[networkName]?.block?.number;
+ const status = (await response.json()) as Status | null;
+ const statusBlockNumber = status
+ ? status[networkName]?.block?.number
+ : undefined;
if (
statusBlockNumber !== undefined &&
statusBlockNumber >= blockNumber
diff --git a/packages/core/src/bin/commands/codegen.ts b/packages/core/src/bin/commands/codegen.ts
index b61e2c7dd..ad49b0864 100644
--- a/packages/core/src/bin/commands/codegen.ts
+++ b/packages/core/src/bin/commands/codegen.ts
@@ -40,13 +40,13 @@ export async function codegen({ cliOptions }: { cliOptions: CliOptions }) {
const shutdown = setupShutdown({ common, cleanup });
- const buildResult = await buildService.start({ watch: false });
+ const { indexing } = await buildService.start({ watch: false });
- if (buildResult.status === "error") {
+ if (indexing.status === "error") {
logger.error({
service: "process",
msg: "Failed schema build",
- error: buildResult.error,
+ error: indexing.error,
});
await shutdown({ reason: "Failed schema build", code: 1 });
return;
@@ -57,7 +57,7 @@ export async function codegen({ cliOptions }: { cliOptions: CliOptions }) {
properties: { cli_command: "codegen" },
});
- runCodegen({ common, graphqlSchema: buildResult.build.graphqlSchema });
+ runCodegen({ common, graphqlSchema: indexing.build.graphqlSchema });
logger.info({ service: "codegen", msg: "Wrote ponder-env.d.ts" });
logger.info({ service: "codegen", msg: "Wrote schema.graphql" });
diff --git a/packages/core/src/bin/commands/dev.ts b/packages/core/src/bin/commands/dev.ts
index 46f0494d8..249bbcf98 100644
--- a/packages/core/src/bin/commands/dev.ts
+++ b/packages/core/src/bin/commands/dev.ts
@@ -1,6 +1,10 @@
import { existsSync } from "node:fs";
import path from "node:path";
-import { type BuildResult, createBuildService } from "@/build/index.js";
+import {
+ type ApiBuildResult,
+ type IndexingBuildResult,
+ createBuildService,
+} from "@/build/index.js";
import { createLogger } from "@/common/logger.js";
import { MetricsService } from "@/common/metrics.js";
import { buildOptions } from "@/common/options.js";
@@ -9,6 +13,7 @@ import { UiService } from "@/ui/service.js";
import { createQueue } from "@ponder/common";
import type { CliOptions } from "../ponder.js";
import { run } from "../utils/run.js";
+import { runServer } from "../utils/runServer.js";
import { setupShutdown } from "../utils/shutdown.js";
export async function dev({ cliOptions }: { cliOptions: CliOptions }) {
@@ -52,10 +57,12 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) {
const uiService = new UiService({ common });
- let cleanupReloadable = () => Promise.resolve();
+ let indexingCleanupReloadable = () => Promise.resolve();
+ let apiCleanupReloadable = () => Promise.resolve();
const cleanup = async () => {
- await cleanupReloadable();
+ await indexingCleanupReloadable();
+ await apiCleanupReloadable();
await buildService.kill();
await telemetry.kill();
uiService.kill();
@@ -63,57 +70,85 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) {
const shutdown = setupShutdown({ common, cleanup });
- const buildQueue = createQueue({
+ const indexingBuildQueue = createQueue({
initialStart: true,
concurrency: 1,
- worker: async (result: BuildResult) => {
- await cleanupReloadable();
+ worker: async (result: IndexingBuildResult) => {
+ await indexingCleanupReloadable();
if (result.status === "success") {
uiService.reset();
metrics.resetMetrics();
- cleanupReloadable = await run({
+ indexingCleanupReloadable = await run({
common,
build: result.build,
onFatalError: () => {
shutdown({ reason: "Received fatal error", code: 1 });
},
onReloadableError: (error) => {
- buildQueue.clear();
- buildQueue.add({ status: "error", error });
+ indexingBuildQueue.clear();
+ indexingBuildQueue.add({ status: "error", error });
},
});
} else {
// This handles build failures and indexing errors on hot reload.
uiService.setReloadableError();
- cleanupReloadable = () => Promise.resolve();
+ indexingCleanupReloadable = () => Promise.resolve();
}
},
});
- const initialResult = await buildService.start({
+ const apiBuildQueue = createQueue({
+ initialStart: true,
+ concurrency: 1,
+ worker: async (result: ApiBuildResult) => {
+ await apiCleanupReloadable();
+
+ if (result.status === "success") {
+ apiCleanupReloadable = await runServer({
+ common,
+ build: result.build,
+ });
+ } else {
+ // This handles build failures on hot reload.
+ uiService.setReloadableError();
+ apiCleanupReloadable = () => Promise.resolve();
+ }
+ },
+ });
+
+ const { api, indexing } = await buildService.start({
watch: true,
- onBuild: (buildResult) => {
- buildQueue.clear();
- buildQueue.add(buildResult);
+ onIndexingBuild: (buildResult) => {
+ indexingBuildQueue.clear();
+ indexingBuildQueue.add(buildResult);
+ },
+ onApiBuild: (buildResult) => {
+ apiBuildQueue.clear();
+ apiBuildQueue.add(buildResult);
},
});
- if (initialResult.status === "error") {
+ if (indexing.status === "error" || api.status === "error") {
await shutdown({ reason: "Failed intial build", code: 1 });
return cleanup;
}
telemetry.record({
name: "lifecycle:session_start",
- properties: { cli_command: "dev", ...buildPayload(initialResult.build) },
+ properties: {
+ cli_command: "dev",
+ ...buildPayload(indexing.build),
+ },
});
- buildQueue.add(initialResult);
+ indexingBuildQueue.add(indexing);
+ apiBuildQueue.add(api);
return async () => {
- buildQueue.pause();
+ indexingBuildQueue.pause();
+ apiBuildQueue.pause();
await cleanup();
};
}
diff --git a/packages/core/src/bin/commands/serve.ts b/packages/core/src/bin/commands/serve.ts
index 5513bd8bf..baf8d2d8c 100644
--- a/packages/core/src/bin/commands/serve.ts
+++ b/packages/core/src/bin/commands/serve.ts
@@ -5,9 +5,6 @@ import { MetricsService } from "@/common/metrics.js";
import { buildOptions } from "@/common/options.js";
import { buildPayload, createTelemetry } from "@/common/telemetry.js";
import { PostgresDatabaseService } from "@/database/postgres/service.js";
-import type { NamespaceInfo } from "@/database/service.js";
-import { getMetadataStore } from "@/indexing-store/metadata.js";
-import { getReadonlyStore } from "@/indexing-store/readonly.js";
import { createServer } from "@/server/service.js";
import type { CliOptions } from "../ponder.js";
import { setupShutdown } from "../utils/shutdown.js";
@@ -53,22 +50,24 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) {
const shutdown = setupShutdown({ common, cleanup });
- const initialResult = await buildService.start({ watch: false });
+ const { api, indexing } = await buildService.start({ watch: false });
// Once we have the initial build, we can kill the build service.
await buildService.kill();
- if (initialResult.status === "error") {
+ if (api.status === "error" || indexing.status === "error") {
await shutdown({ reason: "Failed intial build", code: 1 });
return cleanup;
}
telemetry.record({
name: "lifecycle:session_start",
- properties: { cli_command: "serve", ...buildPayload(initialResult.build) },
+ properties: {
+ cli_command: "serve",
+ ...buildPayload(indexing.build),
+ },
});
- const { databaseConfig, optionsConfig, schema, graphqlSchema } =
- initialResult.build;
+ const { databaseConfig, optionsConfig, schema } = api.build;
common.options = { ...common.options, ...optionsConfig };
@@ -98,32 +97,13 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) {
isReadonly: true,
});
- const readonlyStore = getReadonlyStore({
- encoding: "postgres",
- schema,
- // Note: `ponder serve` serves data from the `publishSchema`. Also, it does
- // not need the other fields in NamespaceInfo because it only uses findUnique
- // and findMany. We should ultimately add a PublicStore interface for this.
- namespaceInfo: {
- userNamespace: databaseConfig.publishSchema,
- } as unknown as NamespaceInfo,
- db: database.readonlyDb,
- common,
- });
-
- const metadataStore = getMetadataStore({
- encoding: database.kind,
- namespaceInfo: {
- userNamespace: databaseConfig.publishSchema,
- } as unknown as NamespaceInfo,
- db: database.readonlyDb,
- });
-
const server = await createServer({
- graphqlSchema,
+ app: api.build.app,
+ routes: api.build.routes,
common,
- readonlyStore,
- metadataStore,
+ schema,
+ database,
+ dbNamespace: databaseConfig.publishSchema,
});
cleanupReloadable = async () => {
diff --git a/packages/core/src/bin/commands/start.ts b/packages/core/src/bin/commands/start.ts
index 9b318fd22..110698255 100644
--- a/packages/core/src/bin/commands/start.ts
+++ b/packages/core/src/bin/commands/start.ts
@@ -6,6 +6,7 @@ import { buildOptions } from "@/common/options.js";
import { buildPayload, createTelemetry } from "@/common/telemetry.js";
import type { CliOptions } from "../ponder.js";
import { run } from "../utils/run.js";
+import { runServer } from "../utils/runServer.js";
import { setupShutdown } from "../utils/shutdown.js";
export async function start({ cliOptions }: { cliOptions: CliOptions }) {
@@ -41,31 +42,36 @@ export async function start({ cliOptions }: { cliOptions: CliOptions }) {
const buildService = await createBuildService({ common });
let cleanupReloadable = () => Promise.resolve();
+ let cleanupReloadableServer = () => Promise.resolve();
const cleanup = async () => {
await cleanupReloadable();
+ await cleanupReloadableServer();
await telemetry.kill();
};
const shutdown = setupShutdown({ common, cleanup });
- const initialResult = await buildService.start({ watch: false });
+ const { indexing, api } = await buildService.start({ watch: false });
// Once we have the initial build, we can kill the build service.
await buildService.kill();
- if (initialResult.status === "error") {
+ if (indexing.status === "error" || api.status === "error") {
await shutdown({ reason: "Failed intial build", code: 1 });
return cleanup;
}
telemetry.record({
name: "lifecycle:session_start",
- properties: { cli_command: "start", ...buildPayload(initialResult.build) },
+ properties: {
+ cli_command: "start",
+ ...buildPayload(indexing.build),
+ },
});
cleanupReloadable = await run({
common,
- build: initialResult.build,
+ build: indexing.build,
onFatalError: () => {
shutdown({ reason: "Received fatal error", code: 1 });
},
@@ -74,5 +80,10 @@ export async function start({ cliOptions }: { cliOptions: CliOptions }) {
},
});
+ cleanupReloadableServer = await runServer({
+ common,
+ build: api.build,
+ });
+
return cleanup;
}
diff --git a/packages/core/src/bin/utils/run.test.ts b/packages/core/src/bin/utils/run.test.ts
index 4d428819f..72e961421 100644
--- a/packages/core/src/bin/utils/run.test.ts
+++ b/packages/core/src/bin/utils/run.test.ts
@@ -3,10 +3,10 @@ import {
setupCommon,
setupIsolatedDatabase,
} from "@/_test/setup.js";
-import type { Build } from "@/build/index.js";
+import type { IndexingBuild } from "@/build/index.js";
import * as codegen from "@/common/codegen.js";
+import { buildGraphQLSchema } from "@/graphql/buildGraphqlSchema.js";
import { createSchema } from "@/schema/schema.js";
-import { buildGraphqlSchema } from "@/server/graphql/buildGraphqlSchema.js";
import { promiseWithResolvers } from "@ponder/common";
import { beforeEach, expect, test, vi } from "vitest";
import { run } from "./run.js";
@@ -26,10 +26,10 @@ const schema = createSchema((p) => ({
}),
}));
-const graphqlSchema = buildGraphqlSchema(schema);
+const graphqlSchema = buildGraphQLSchema(schema);
test("run() kill", async (context) => {
- const build: Build = {
+ const build: IndexingBuild = {
buildId: "buildId",
schema,
graphqlSchema,
@@ -59,7 +59,7 @@ test("run() setup", async (context) => {
"Erc20:setup": vi.fn(),
};
- const build: Build = {
+ const build: IndexingBuild = {
buildId: "buildId",
schema,
graphqlSchema,
@@ -88,7 +88,7 @@ test("run() setup error", async (context) => {
};
const onReloadableErrorPromiseResolver = promiseWithResolvers();
- const build: Build = {
+ const build: IndexingBuild = {
buildId: "buildId",
schema,
graphqlSchema,
diff --git a/packages/core/src/bin/utils/run.ts b/packages/core/src/bin/utils/run.ts
index f1fb889f1..14245e2ca 100644
--- a/packages/core/src/bin/utils/run.ts
+++ b/packages/core/src/bin/utils/run.ts
@@ -1,4 +1,4 @@
-import type { Build } from "@/build/index.js";
+import type { IndexingBuild } from "@/build/index.js";
import { runCodegen } from "@/common/codegen.js";
import type { Common } from "@/common/common.js";
import { PostgresDatabaseService } from "@/database/postgres/service.js";
@@ -10,7 +10,6 @@ import { getReadonlyStore } from "@/indexing-store/readonly.js";
import { getRealtimeStore } from "@/indexing-store/realtime.js";
import type { IndexingStore, Status } from "@/indexing-store/store.js";
import { createIndexingService } from "@/indexing/index.js";
-import { createServer } from "@/server/service.js";
import { PostgresSyncStore } from "@/sync-store/postgres/store.js";
import { SqliteSyncStore } from "@/sync-store/sqlite/store.js";
import type { SyncStore } from "@/sync-store/store.js";
@@ -41,7 +40,7 @@ export type RealtimeEvent =
};
/**
- * Starts the server, sync, and indexing services for the specified build.
+ * Starts the sync and indexing services for the specified build.
*/
export async function run({
common,
@@ -50,7 +49,7 @@ export async function run({
onReloadableError,
}: {
common: Common;
- build: Build;
+ build: IndexingBuild;
onFatalError: (error: Error) => void;
onReloadableError: (error: Error) => void;
}) {
@@ -60,8 +59,8 @@ export async function run({
optionsConfig,
networks,
sources,
- schema,
graphqlSchema,
+ schema,
indexingFunctions,
} = build;
@@ -108,22 +107,6 @@ export async function run({
namespaceInfo,
db: database.indexingDb,
});
- await metadataStore.setStatus(status);
-
- const readonlyStore = getReadonlyStore({
- encoding: database.kind,
- schema,
- namespaceInfo,
- db: database.readonlyDb,
- common,
- });
-
- const server = await createServer({
- common,
- graphqlSchema,
- readonlyStore,
- metadataStore,
- });
// This can be a long-running operation, so it's best to do it after
// starting the server so the app can become responsive more quickly.
@@ -215,6 +198,14 @@ export async function run({
},
});
+ const readonlyStore = getReadonlyStore({
+ encoding: database.kind,
+ schema,
+ namespaceInfo,
+ db: database.indexingDb,
+ common,
+ });
+
const historicalStore = getHistoricalStore({
encoding: database.kind,
schema,
@@ -340,14 +331,12 @@ export async function run({
const startPromise = start();
return async () => {
- const serverPromise = server.kill();
indexingService.kill();
await syncService.kill();
realtimeQueue.pause();
realtimeQueue.clear();
await realtimeQueue.onIdle();
await startPromise;
- await serverPromise;
await database.kill();
};
}
diff --git a/packages/core/src/bin/utils/runServer.ts b/packages/core/src/bin/utils/runServer.ts
new file mode 100644
index 000000000..b2f62f159
--- /dev/null
+++ b/packages/core/src/bin/utils/runServer.ts
@@ -0,0 +1,50 @@
+import type { ApiBuild } from "@/build/index.js";
+import type { Common } from "@/common/common.js";
+import { PostgresDatabaseService } from "@/database/postgres/service.js";
+import type { DatabaseService } from "@/database/service.js";
+import { SqliteDatabaseService } from "@/database/sqlite/service.js";
+import { createServer } from "@/server/service.js";
+
+/**
+ * Starts the server for the specified build.
+ */
+export async function runServer({
+ common,
+ build,
+}: {
+ common: Common;
+ build: ApiBuild;
+}) {
+ const { databaseConfig, optionsConfig, schema } = build;
+
+ common.options = { ...common.options, ...optionsConfig };
+
+ let database: DatabaseService;
+
+ if (databaseConfig.kind === "sqlite") {
+ const { directory } = databaseConfig;
+ database = new SqliteDatabaseService({ common, directory });
+ } else {
+ const { poolConfig, schema: userNamespace, publishSchema } = databaseConfig;
+ database = new PostgresDatabaseService({
+ common,
+ poolConfig,
+ userNamespace,
+ publishSchema,
+ });
+ }
+
+ const server = await createServer({
+ app: build.app,
+ routes: build.routes,
+ common,
+ schema,
+ database,
+ dbNamespace:
+ databaseConfig.kind === "sqlite" ? "public" : databaseConfig.schema,
+ });
+
+ return async () => {
+ await server.kill();
+ };
+}
diff --git a/packages/core/src/build/index.ts b/packages/core/src/build/index.ts
index 366a47b05..5bbcc2c03 100644
--- a/packages/core/src/build/index.ts
+++ b/packages/core/src/build/index.ts
@@ -1,6 +1,12 @@
import { type Extend, extend } from "@/utils/extend.js";
import { create, kill, start } from "./service.js";
-import type { Build, BuildResult, Service } from "./service.js";
+import type {
+ ApiBuild,
+ ApiBuildResult,
+ IndexingBuild,
+ IndexingBuildResult,
+ Service,
+} from "./service.js";
const methods = { start, kill };
@@ -8,4 +14,4 @@ export const createBuildService = extend(create, methods);
export type BuildService = Extend;
-export type { BuildResult, Build };
+export type { IndexingBuild, IndexingBuildResult, ApiBuild, ApiBuildResult };
diff --git a/packages/core/src/build/plugin.test.ts b/packages/core/src/build/plugin.test.ts
deleted file mode 100644
index 52664ad2e..000000000
--- a/packages/core/src/build/plugin.test.ts
+++ /dev/null
@@ -1,79 +0,0 @@
-import { expect, test } from "vitest";
-
-import { regex, replaceStateless, shim } from "./plugin.js";
-
-test("regex matches basic", () => {
- const code = `import { ponder } from "@/generated";\n`;
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches multiline", () => {
- const code =
- 'import { ponder } from "@/generated";\n' +
- 'ponder.on("PrimitiveManager:Swap", async ({ event, context }) => {\n';
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches import including types before", () => {
- const code = 'import { type Context, ponder } from "@/generated";\n';
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches import includinga types after", () => {
- const code = 'import { ponder, type Context } from "@/generated";\n';
-
- expect(regex.test(code)).toBe(true);
- expect(code.replace(regex, shim).includes(shim)).toBe(true);
-});
-
-test("regex matches import including newlines", () => {
- const code =
- "import {\n" + "ponder,\n" + "type Context,\n" + '} from "@/generated";\n';
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches no trailing semicolon", () => {
- const code = `import { ponder } from "@/generated"`;
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches no trailing single quote import", () => {
- const code = `import { ponder } from '@/generated'`;
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches no trailing newline", () => {
- const code = `import { ponder } from "@/generated";ponder.on("PrimitiveManager:Swap", async ({ event, context }) => {`;
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
-
-test("regex matches preceding import", () => {
- const code =
- `import {decodeEventLog} from "viem";\n` +
- `import {ponder} from "@/generated";\n`;
-
- expect(regex.test(code)).toBe(true);
- const s = replaceStateless(code);
- expect(s.toString().includes(shim)).toBe(true);
-});
diff --git a/packages/core/src/build/plugin.ts b/packages/core/src/build/plugin.ts
index cca7bf2f5..7088e4a02 100644
--- a/packages/core/src/build/plugin.ts
+++ b/packages/core/src/build/plugin.ts
@@ -1,40 +1,41 @@
-import MagicString from "magic-string";
import type { Plugin } from "vite";
-export const regex =
- /^import\s+\{[^}]*\bponder\b[^}]*\}\s+from\s+["']@\/generated["'];?.*$/gm;
+const virtualModule = `import { Hono } from "hono";
-export const shim = `export let ponder = {
+const ponderHono = {
+ routes: [],
+ get(...maybePathOrHandlers) {
+ this.routes.push({ method: "GET", pathOrHandlers: maybePathOrHandlers });
+ return this;
+ },
+ post(...maybePathOrHandlers) {
+ this.routes.push({ method: "POST", pathOrHandlers: maybePathOrHandlers });
+ return this;
+ },
+ use(...maybePathOrHandlers) {
+ this.routes.push({ method: "USE", pathOrHandlers: maybePathOrHandlers });
+ return this;
+ },
+};
+
+const ponder = {
+ ...ponderHono,
+ hono: new Hono(),
fns: [],
on(name, fn) {
this.fns.push({ name, fn });
},
};
-`;
-export function replaceStateless(code: string) {
- const s = new MagicString(code);
- // MagicString.replace calls regex.exec(), which increments `lastIndex`
- // on a match. We have to set this back to zero to use the same regex
- // multiple times.
- regex.lastIndex = 0;
- s.replace(regex, shim);
- return s;
-}
+export { ponder };
+`;
export const vitePluginPonder = (): Plugin => {
return {
name: "ponder",
- transform: (code, id) => {
- if (regex.test(code)) {
- const s = replaceStateless(code);
- const transformed = s.toString();
- const sourcemap = s.generateMap({ source: id });
-
- return { code: transformed, map: sourcemap };
- } else {
- return null;
- }
+ load: (id) => {
+ if (id === "@/generated") return virtualModule;
+ return null;
},
};
};
diff --git a/packages/core/src/build/service.ts b/packages/core/src/build/service.ts
index eaa96dd22..aa02adec3 100644
--- a/packages/core/src/build/service.ts
+++ b/packages/core/src/build/service.ts
@@ -1,15 +1,18 @@
import { createHash } from "node:crypto";
-import { readFileSync } from "node:fs";
+import fs from "node:fs";
import path from "node:path";
import type { Common } from "@/common/common.js";
+import { BuildError } from "@/common/errors.js";
import type { Config, OptionsConfig } from "@/config/config.js";
import type { DatabaseConfig } from "@/config/database.js";
import type { Network } from "@/config/networks.js";
import type { EventSource } from "@/config/sources.js";
+import { buildGraphQLSchema } from "@/graphql/buildGraphqlSchema.js";
+import type { PonderRoutes } from "@/hono/index.js";
import type { Schema } from "@/schema/common.js";
-import { buildGraphqlSchema } from "@/server/graphql/buildGraphqlSchema.js";
import { glob } from "glob";
import type { GraphQLSchema } from "graphql";
+import type { Hono } from "hono";
import { type ViteDevServer, createServer } from "vite";
import { ViteNodeRunner } from "vite-node/client";
import { ViteNodeServer } from "vite-node/server";
@@ -30,7 +33,10 @@ const BUILD_ID_VERSION = "1";
export type Service = {
// static
common: Common;
- srcRegex: RegExp;
+ indexingRegex: RegExp;
+ apiRegex: RegExp;
+ indexingPattern: string;
+ apiPattern: string;
// vite
viteDevServer: ViteDevServer;
@@ -38,7 +44,7 @@ export type Service = {
viteNodeRunner: ViteNodeRunner;
};
-export type Build = {
+type BaseBuild = {
// Build ID for caching
buildId: string;
// Config
@@ -49,22 +55,24 @@ export type Build = {
// Schema
schema: Schema;
graphqlSchema: GraphQLSchema;
- // Indexing functions
+};
+
+export type IndexingBuild = BaseBuild & {
indexingFunctions: IndexingFunctions;
};
-export type BuildResult =
- | { status: "success"; build: Build }
+export type ApiBuild = BaseBuild & {
+ app: Hono;
+ routes: PonderRoutes;
+};
+
+export type IndexingBuildResult =
+ | { status: "success"; build: IndexingBuild }
| { status: "error"; error: Error };
-type RawBuild = {
- config: { config: Config; contentHash: string };
- schema: { schema: Schema; contentHash: string };
- indexingFunctions: {
- indexingFunctions: RawIndexingFunctions;
- contentHash: string;
- };
-};
+export type ApiBuildResult =
+ | { status: "success"; build: ApiBuild }
+ | { status: "error"; error: Error };
export const create = async ({
common,
@@ -72,12 +80,28 @@ export const create = async ({
common: Common;
}): Promise => {
const escapeRegex = /[.*+?^${}()|[\]\\]/g;
- const escapedSrcDir = common.options.srcDir
+
+ const escapedIndexingDir = common.options.indexingDir
// If on Windows, use a POSIX path for this regex.
.replace(/\\/g, "/")
// Escape special characters in the path.
.replace(escapeRegex, "\\$&");
- const srcRegex = new RegExp(`^${escapedSrcDir}/.*\\.(ts|js)$`);
+ const indexingRegex = new RegExp(`^${escapedIndexingDir}/.*\\.(ts|js)$`);
+
+ const escapedApiDir = common.options.apiDir
+ // If on Windows, use a POSIX path for this regex.
+ .replace(/\\/g, "/")
+ // Escape special characters in the path.
+ .replace(escapeRegex, "\\$&");
+ const apiRegex = new RegExp(`^${escapedApiDir}/.*\\.(ts|js)$`);
+
+ const indexingPattern = path
+ .join(common.options.indexingDir, "**/*.{js,mjs,ts,mts}")
+ .replace(/\\/g, "/");
+
+ const apiPattern = path
+ .join(common.options.apiDir, "**/*.{js,mjs,ts,mts}")
+ .replace(/\\/g, "/");
const viteLogger = {
warnedMessages: new Set(),
@@ -129,7 +153,10 @@ export const create = async ({
return {
common,
- srcRegex,
+ indexingRegex,
+ apiRegex,
+ indexingPattern,
+ apiPattern,
viteDevServer,
viteNodeServer,
viteNodeRunner,
@@ -146,36 +173,54 @@ export const start = async (
buildService: Service,
{
watch,
- onBuild,
+ onIndexingBuild,
+ onApiBuild,
}:
- | { watch: true; onBuild: (buildResult: BuildResult) => void }
- | { watch: false; onBuild?: never },
-): Promise => {
+ | {
+ watch: true;
+ onIndexingBuild: (buildResult: IndexingBuildResult) => void;
+ onApiBuild: (buildResult: ApiBuildResult) => void;
+ }
+ | { watch: false; onIndexingBuild?: never; onApiBuild?: never },
+): Promise<{ indexing: IndexingBuildResult; api: ApiBuildResult }> => {
const { common } = buildService;
// Note: Don't run these in parallel. If there are circular imports in user code,
// it's possible for ViteNodeRunner to return exports as undefined (a race condition).
const configResult = await executeConfig(buildService);
const schemaResult = await executeSchema(buildService);
- const indexingFunctionsResult = await executeIndexingFunctions(buildService);
+ const indexingResult = await executeIndexingFunctions(buildService);
+ const apiResult = await executeApiRoutes(buildService);
if (configResult.status === "error") {
- return { status: "error", error: configResult.error };
+ return {
+ indexing: { status: "error", error: configResult.error },
+ api: { status: "error", error: configResult.error },
+ };
}
if (schemaResult.status === "error") {
- return { status: "error", error: schemaResult.error };
+ return {
+ indexing: { status: "error", error: schemaResult.error },
+ api: { status: "error", error: schemaResult.error },
+ };
}
- if (indexingFunctionsResult.status === "error") {
- return { status: "error", error: indexingFunctionsResult.error };
+ if (indexingResult.status === "error") {
+ return {
+ indexing: { status: "error", error: indexingResult.error },
+ api: { status: "error", error: indexingResult.error },
+ };
+ }
+ if (apiResult.status === "error") {
+ return {
+ indexing: { status: "error", error: apiResult.error },
+ api: { status: "error", error: apiResult.error },
+ };
}
- const rawBuild: RawBuild = {
- config: configResult,
- schema: schemaResult,
- indexingFunctions: indexingFunctionsResult,
- };
-
- const buildResult = await validateAndBuild(buildService, rawBuild);
+ let cachedConfigResult = configResult;
+ let cachedSchemaResult = schemaResult;
+ let cachedIndexingResult = indexingResult;
+ let cachedApiResult = apiResult;
// If watch is false (`ponder start` or `ponder serve`),
// don't register any event handlers on the watcher.
@@ -224,13 +269,23 @@ export const start = async (
const hasSchemaUpdate = invalidated.includes(
common.options.schemaFile.replace(/\\/g, "/"),
);
- const hasIndexingFunctionUpdate = invalidated.some((file) =>
- buildService.srcRegex.test(file),
+ const hasIndexingUpdate = invalidated.some(
+ (file) =>
+ buildService.indexingRegex.test(file) &&
+ !buildService.apiRegex.test(file),
+ );
+ const hasApiUpdate = invalidated.some((file) =>
+ buildService.apiRegex.test(file),
);
// This branch could trigger if you change a `note.txt` file within `src/`.
// Note: We could probably do a better job filtering out files in `isFileIgnored`.
- if (!hasConfigUpdate && !hasSchemaUpdate && !hasIndexingFunctionUpdate) {
+ if (
+ !hasConfigUpdate &&
+ !hasSchemaUpdate &&
+ !hasIndexingUpdate &&
+ !hasApiUpdate
+ ) {
return;
}
@@ -244,38 +299,132 @@ export const start = async (
if (hasConfigUpdate) {
const result = await executeConfig(buildService);
if (result.status === "error") {
- onBuild({ status: "error", error: result.error });
+ onIndexingBuild({ status: "error", error: result.error });
return;
}
- rawBuild.config = result;
+ cachedConfigResult = result;
}
if (hasSchemaUpdate) {
const result = await executeSchema(buildService);
if (result.status === "error") {
- onBuild({ status: "error", error: result.error });
+ onIndexingBuild({ status: "error", error: result.error });
return;
}
- rawBuild.schema = result;
+ cachedSchemaResult = result;
}
- if (hasIndexingFunctionUpdate) {
+ if (hasIndexingUpdate) {
+ const files = glob.sync(buildService.indexingPattern, {
+ ignore: buildService.apiPattern,
+ });
+ buildService.viteNodeRunner.moduleCache.invalidateDepTree(files);
+ buildService.viteNodeRunner.moduleCache.deleteByModuleId("@/generated");
+
const result = await executeIndexingFunctions(buildService);
if (result.status === "error") {
- onBuild({ status: "error", error: result.error });
+ onIndexingBuild({ status: "error", error: result.error });
+ return;
+ }
+ cachedIndexingResult = result;
+ }
+
+ if (hasApiUpdate) {
+ const files = glob.sync(buildService.apiPattern);
+ buildService.viteNodeRunner.moduleCache.invalidateDepTree(files);
+ buildService.viteNodeRunner.moduleCache.deleteByModuleId("@/generated");
+
+ const result = await executeApiRoutes(buildService);
+ if (result.status === "error") {
+ onApiBuild({ status: "error", error: result.error });
return;
}
- rawBuild.indexingFunctions = result;
+ cachedApiResult = result;
}
- const buildResult = await validateAndBuild(buildService, rawBuild);
- onBuild(buildResult);
+ /**
+ * Build and validate updated indexing and api artifacts
+ *
+ * There are a few cases to handle:
+ * 1) config or schema is updated -> rebuild both api and indexing
+ * 2) indexing functions are updated -> rebuild indexing
+ * 3) api routes are updated -> rebuild api
+ *
+ * Note: the api build cannot be successful if the indexing
+ * build fails, this means that any indexing errors are always
+ * propagated to the api build.
+ */
+
+ const indexingBuildResult = await validateAndBuild(
+ buildService,
+ cachedConfigResult,
+ cachedSchemaResult,
+ cachedIndexingResult,
+ );
+ if (indexingBuildResult.status === "error") {
+ onIndexingBuild(indexingBuildResult);
+ onApiBuild(indexingBuildResult);
+ return;
+ }
+
+ // If schema or config is updated, rebuild both api and indexing
+ if (hasConfigUpdate || hasSchemaUpdate) {
+ onIndexingBuild(indexingBuildResult);
+ onApiBuild(
+ validateAndBuildApi(
+ buildService,
+ indexingBuildResult.build,
+ cachedApiResult,
+ ),
+ );
+ } else {
+ if (hasIndexingUpdate) {
+ onIndexingBuild(indexingBuildResult);
+ }
+
+ if (hasApiUpdate) {
+ onApiBuild(
+ validateAndBuildApi(
+ buildService,
+ indexingBuildResult.build,
+ cachedApiResult,
+ ),
+ );
+ }
+ }
};
buildService.viteDevServer.watcher.on("change", onFileChange);
}
- return buildResult;
+ // Build and validate initial indexing and server build.
+ // Note: the api build cannot be successful if the indexing
+ // build fails
+
+ const initialBuildResult = await validateAndBuild(
+ buildService,
+ configResult,
+ schemaResult,
+ indexingResult,
+ );
+
+ if (initialBuildResult.status === "error") {
+ return {
+ indexing: { status: "error", error: initialBuildResult.error },
+ api: { status: "error", error: initialBuildResult.error },
+ };
+ }
+
+ const initialApiBuildResult = validateAndBuildApi(
+ buildService,
+ initialBuildResult.build,
+ apiResult,
+ );
+
+ return {
+ indexing: initialBuildResult,
+ api: initialApiBuildResult,
+ };
};
export const kill = async (buildService: Service): Promise => {
@@ -354,11 +503,9 @@ const executeIndexingFunctions = async (
}
| { status: "error"; error: Error }
> => {
- const pattern = path
- .join(buildService.common.options.srcDir, "**/*.{js,mjs,ts,mts}")
- .replace(/\\/g, "/");
- const files = glob.sync(pattern);
-
+ const files = glob.sync(buildService.indexingPattern, {
+ ignore: buildService.apiPattern,
+ });
const executeResults = await Promise.all(
files.map(async (file) => ({
...(await executeFile(buildService, { file })),
@@ -366,8 +513,6 @@ const executeIndexingFunctions = async (
})),
);
- const indexingFunctions: RawIndexingFunctions = [];
-
for (const executeResult of executeResults) {
if (executeResult.status === "error") {
buildService.common.logger.error({
@@ -381,8 +526,6 @@ const executeIndexingFunctions = async (
return executeResult;
}
-
- indexingFunctions.push(...(executeResult.exports?.ponder?.fns ?? []));
}
// Note that we are only hashing the file contents, not the exports. This is
@@ -390,7 +533,7 @@ const executeIndexingFunctions = async (
const hash = createHash("sha256");
for (const file of files) {
try {
- const contents = readFileSync(file, "utf-8");
+ const contents = fs.readFileSync(file, "utf-8");
hash.update(contents);
} catch (e) {
buildService.common.logger.warn({
@@ -402,16 +545,69 @@ const executeIndexingFunctions = async (
}
const contentHash = hash.digest("hex");
- return { status: "success", indexingFunctions, contentHash };
+ const exports = await buildService.viteNodeRunner.executeId("@/generated");
+
+ return {
+ status: "success",
+ indexingFunctions: exports.ponder.fns,
+ contentHash,
+ };
+};
+
+const executeApiRoutes = async (
+ buildService: Service,
+): Promise<
+ | {
+ status: "success";
+ app: Hono;
+ routes: PonderRoutes;
+ }
+ | { status: "error"; error: Error }
+> => {
+ const files = glob.sync(buildService.apiPattern);
+ const executeResults = await Promise.all(
+ files.map(async (file) => ({
+ ...(await executeFile(buildService, { file })),
+ file,
+ })),
+ );
+
+ for (const executeResult of executeResults) {
+ if (executeResult.status === "error") {
+ buildService.common.logger.error({
+ service: "build",
+ msg: `Error while executing '${path.relative(
+ buildService.common.options.rootDir,
+ executeResult.file,
+ )}':`,
+ error: executeResult.error,
+ });
+
+ return executeResult;
+ }
+ }
+
+ const exports = await buildService.viteNodeRunner.executeId("@/generated");
+
+ return {
+ status: "success",
+ app: exports.ponder.hono,
+ routes: exports.ponder.routes,
+ };
};
const validateAndBuild = async (
{ common }: Pick,
- rawBuild: RawBuild,
-): Promise => {
+ config: { config: Config; contentHash: string },
+ schema: { schema: Schema; contentHash: string },
+ indexingFunctions: {
+ indexingFunctions: RawIndexingFunctions;
+ contentHash: string;
+ },
+): Promise => {
// Validate and build the schema
const buildSchemaResult = safeBuildSchema({
- schema: rawBuild.schema.schema,
+ schema: schema.schema,
});
if (buildSchemaResult.status === "error") {
common.logger.error({
@@ -427,13 +623,13 @@ const validateAndBuild = async (
common.logger[log.level]({ service: "build", msg: log.msg });
}
- const graphqlSchema = buildGraphqlSchema(buildSchemaResult.schema);
+ const graphqlSchema = buildGraphQLSchema(buildSchemaResult.schema);
// Validates and build the config
const buildConfigAndIndexingFunctionsResult =
await safeBuildConfigAndIndexingFunctions({
- config: rawBuild.config.config,
- rawIndexingFunctions: rawBuild.indexingFunctions.indexingFunctions,
+ config: config.config,
+ rawIndexingFunctions: indexingFunctions.indexingFunctions,
options: common.options,
});
if (buildConfigAndIndexingFunctionsResult.status === "error") {
@@ -452,9 +648,9 @@ const validateAndBuild = async (
const buildId = createHash("sha256")
.update(BUILD_ID_VERSION)
- .update(rawBuild.config.contentHash)
- .update(rawBuild.schema.contentHash)
- .update(rawBuild.indexingFunctions.contentHash)
+ .update(config.contentHash)
+ .update(schema.contentHash)
+ .update(indexingFunctions.contentHash)
.digest("hex")
.slice(0, 10);
@@ -479,6 +675,40 @@ const validateAndBuild = async (
};
};
+const validateAndBuildApi = (
+ { common }: Pick,
+ baseBuild: BaseBuild,
+ api: { app: Hono; routes: PonderRoutes },
+): ApiBuildResult => {
+ for (const {
+ pathOrHandlers: [maybePathOrHandler],
+ } of api.routes) {
+ if (typeof maybePathOrHandler === "string") {
+ if (
+ maybePathOrHandler === "/status" ||
+ maybePathOrHandler === "/metrics" ||
+ maybePathOrHandler === "/health"
+ ) {
+ const error = new BuildError(
+ `Validation failed: API route "${maybePathOrHandler}" is reserved for internal use.`,
+ );
+ error.stack = undefined;
+ common.logger.error({ service: "build", msg: "Failed build", error });
+ return { status: "error", error } as const;
+ }
+ }
+ }
+
+ return {
+ status: "success",
+ build: {
+ ...baseBuild,
+ app: api.app,
+ routes: api.routes,
+ },
+ };
+};
+
const executeFile = async (
{ common, viteNodeRunner }: Service,
{ file }: { file: string },
diff --git a/packages/core/src/common/codegen.ts b/packages/core/src/common/codegen.ts
index a6c2ad396..59b5d6baa 100644
--- a/packages/core/src/common/codegen.ts
+++ b/packages/core/src/common/codegen.ts
@@ -1,8 +1,7 @@
import { mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import type { Common } from "@/common/common.js";
-import type { GraphQLSchema } from "graphql";
-import { printSchema } from "graphql";
+import { type GraphQLSchema, printSchema } from "graphql";
export const ponderEnv = `// This file enables type checking and editor autocomplete for this Ponder project.
// After upgrading, you may find that changes have been made to this file.
@@ -27,6 +26,7 @@ declare module "@/generated" {
schema,
name
>;
+ export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
export type Schema = Virtual.Schema;
diff --git a/packages/core/src/common/errors.ts b/packages/core/src/common/errors.ts
index c7689ba47..f0b13bc75 100644
--- a/packages/core/src/common/errors.ts
+++ b/packages/core/src/common/errors.ts
@@ -1,4 +1,4 @@
-class BaseError extends Error {
+export class BaseError extends Error {
override name = "BaseError";
meta: string[] = [];
diff --git a/packages/core/src/common/options.ts b/packages/core/src/common/options.ts
index 21b7f02c2..8fcb23123 100644
--- a/packages/core/src/common/options.ts
+++ b/packages/core/src/common/options.ts
@@ -9,7 +9,8 @@ export type Options = {
configFile: string;
schemaFile: string;
rootDir: string;
- srcDir: string;
+ indexingDir: string;
+ apiDir: string;
generatedDir: string;
ponderDir: string;
logDir: string;
@@ -18,10 +19,6 @@ export type Options = {
hostname?: string;
maxHealthcheckDuration: number;
- graphqlMaxOperationTokens: number;
- graphqlMaxOperationDepth: number;
- graphqlMaxOperationAliases: number;
-
telemetryUrl: string;
telemetryDisabled: boolean;
telemetryConfigDir: string | undefined;
@@ -82,7 +79,8 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => {
rootDir,
configFile: path.join(rootDir, cliOptions.config),
schemaFile: path.join(rootDir, "ponder.schema.ts"),
- srcDir: path.join(rootDir, "src"),
+ indexingDir: path.join(rootDir, "src"),
+ apiDir: path.join(rootDir, "src", "api"),
generatedDir: path.join(rootDir, "generated"),
ponderDir: path.join(rootDir, ".ponder"),
logDir: path.join(rootDir, ".ponder", "logs"),
@@ -91,12 +89,6 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => {
hostname,
maxHealthcheckDuration: 240, // 4 minutes
- // Default limits are from Apollo:
- // https://www.apollographql.com/blog/prevent-graph-misuse-with-operation-size-and-complexity-limits
- graphqlMaxOperationTokens: 1000,
- graphqlMaxOperationDepth: 100,
- graphqlMaxOperationAliases: 30,
-
telemetryUrl: "https://ponder.sh/api/telemetry",
telemetryDisabled: Boolean(process.env.PONDER_TELEMETRY_DISABLED),
telemetryConfigDir: undefined,
diff --git a/packages/core/src/common/telemetry.ts b/packages/core/src/common/telemetry.ts
index 671e0389f..913f05141 100644
--- a/packages/core/src/common/telemetry.ts
+++ b/packages/core/src/common/telemetry.ts
@@ -4,7 +4,7 @@ import { existsSync, readFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";
import { promisify } from "node:util";
-import type { Build } from "@/build/service.js";
+import type { IndexingBuild } from "@/build/service.js";
import type { Options } from "@/common/options.js";
import { getTables } from "@/schema/utils.js";
import { startClock } from "@/utils/timer.js";
@@ -269,7 +269,7 @@ function getPackageJson(rootDir: string) {
}
}
-export function buildPayload(build: Build) {
+export function buildPayload(build: IndexingBuild) {
const table_count = Object.keys(getTables(build.schema)).length;
const indexing_function_count = Object.values(build.indexingFunctions).reduce(
(acc, f) => acc + Object.keys(f).length,
diff --git a/packages/core/src/database/postgres/service.ts b/packages/core/src/database/postgres/service.ts
index d00844bf5..352077f42 100644
--- a/packages/core/src/database/postgres/service.ts
+++ b/packages/core/src/database/postgres/service.ts
@@ -26,7 +26,7 @@ import {
} from "@/utils/checkpoint.js";
import { formatEta } from "@/utils/format.js";
import { hash } from "@/utils/hash.js";
-import { createPool } from "@/utils/pg.js";
+import { createPool, createReadonlyPool } from "@/utils/pg.js";
import { wait } from "@/utils/wait.js";
import {
type CreateTableBuilder,
@@ -67,7 +67,7 @@ export class PostgresDatabaseService implements BaseDatabaseService {
private internalPool: Pool;
private syncPool: Pool;
private indexingPool: Pool;
- private readonlyPool: Pool;
+ readonlyPool: Pool;
constructor({
common,
@@ -108,7 +108,7 @@ export class PostgresDatabaseService implements BaseDatabaseService {
application_name: `${userNamespace}_indexing`,
max: indexingMax,
});
- this.readonlyPool = createPool({
+ this.readonlyPool = createReadonlyPool({
...poolConfig,
application_name: `${userNamespace}_readonly`,
max: syncMax,
diff --git a/packages/core/src/database/sqlite/service.ts b/packages/core/src/database/sqlite/service.ts
index 02d0f178f..8fd82d0a9 100644
--- a/packages/core/src/database/sqlite/service.ts
+++ b/packages/core/src/database/sqlite/service.ts
@@ -24,7 +24,11 @@ import {
} from "@/utils/checkpoint.js";
import { formatEta } from "@/utils/format.js";
import { hash } from "@/utils/hash.js";
-import { type SqliteDatabase, createSqliteDatabase } from "@/utils/sqlite.js";
+import {
+ type SqliteDatabase,
+ createReadonlySqliteDatabase,
+ createSqliteDatabase,
+} from "@/utils/sqlite.js";
import { wait } from "@/utils/wait.js";
import {
type CreateTableBuilder,
@@ -53,6 +57,7 @@ export class SqliteDatabaseService implements BaseDatabaseService {
private internalDatabase: SqliteDatabase;
private syncDatabase: SqliteDatabase;
+ readonlyDatabase: SqliteDatabase;
db: HeadlessKysely;
readonlyDb: HeadlessKysely;
@@ -91,6 +96,11 @@ export class SqliteDatabaseService implements BaseDatabaseService {
`ATTACH DATABASE '${userDatabaseFile}' AS ${this.userNamespace}`,
);
+ this.readonlyDatabase = createReadonlySqliteDatabase(internalDatabaseFile);
+ this.readonlyDatabase.exec(
+ `ATTACH DATABASE '${userDatabaseFile}' AS ${this.userNamespace}`,
+ );
+
this.db = new HeadlessKysely({
name: "internal",
common,
@@ -133,7 +143,7 @@ export class SqliteDatabaseService implements BaseDatabaseService {
this.readonlyDb = new HeadlessKysely({
name: "readonly",
common,
- dialect: new SqliteDialect({ database: this.internalDatabase }),
+ dialect: new SqliteDialect({ database: this.readonlyDatabase }),
log(event) {
if (event.level === "query") {
common.metrics.ponder_sqlite_query_total.inc({
diff --git a/packages/core/src/drizzle/bigint.ts b/packages/core/src/drizzle/bigint.ts
new file mode 100644
index 000000000..0e499df67
--- /dev/null
+++ b/packages/core/src/drizzle/bigint.ts
@@ -0,0 +1,35 @@
+import { decodeToBigInt, encodeAsText } from "@/utils/encoding.js";
+import { entityKind } from "drizzle-orm";
+import {
+ type AnySQLiteTable,
+ SQLiteColumn,
+ SQLiteColumnBuilder,
+} from "drizzle-orm/sqlite-core";
+
+export class SQLiteBigintBuilder extends SQLiteColumnBuilder {
+ static readonly [entityKind]: string = "SQLiteBigintBuilder";
+
+ constructor(columnName: string) {
+ super(columnName, "string", "SQLiteBigint");
+ }
+
+ build(table: AnySQLiteTable) {
+ return new SQLiteBigint(table, this.config);
+ }
+}
+
+export class SQLiteBigint extends SQLiteColumn {
+ static readonly [entityKind]: string = "SQLiteBigint";
+
+ getSQLType(): string {
+ return "varchar(79)";
+ }
+
+ override mapFromDriverValue(value: string) {
+ return decodeToBigInt(value);
+ }
+
+ override mapToDriverValue(value: bigint): string {
+ return encodeAsText(value as bigint);
+ }
+}
diff --git a/packages/core/src/drizzle/db.ts b/packages/core/src/drizzle/db.ts
new file mode 100644
index 000000000..76b87d550
--- /dev/null
+++ b/packages/core/src/drizzle/db.ts
@@ -0,0 +1,29 @@
+import type { Column, SQLWrapper, SelectedFields, Table } from "drizzle-orm";
+import type { SelectBuilder } from "./select.js";
+
+export type DrizzleDb = {
+ select(): SelectBuilder;
+ select>(
+ fields: TSelection,
+ ): SelectBuilder;
+ select(
+ fields?: SelectedFields,
+ ): SelectBuilder | undefined, "async", void>;
+ /**
+ * Execute a raw read-only SQL query.
+ *
+ * @example
+ * import { ponder } from "@/generated";
+ * import { sql } from "@ponder/core";
+ *
+ * ponder.get("/", async (c) => {
+ * const result = await c.db.execute(sql`SELECT * from "Accounts"`);
+ * return c.json(result);
+ * });
+ *
+ * @see https://orm.drizzle.team/docs/sql
+ */
+ execute: >(
+ query: SQLWrapper,
+ ) => Promise;
+};
diff --git a/packages/core/src/drizzle/hex.ts b/packages/core/src/drizzle/hex.ts
new file mode 100644
index 000000000..40708fd7a
--- /dev/null
+++ b/packages/core/src/drizzle/hex.ts
@@ -0,0 +1,68 @@
+import { entityKind } from "drizzle-orm";
+import {
+ type AnyPgTable,
+ PgColumn,
+ PgColumnBuilder,
+} from "drizzle-orm/pg-core";
+import {
+ type AnySQLiteTable,
+ SQLiteColumn,
+ SQLiteColumnBuilder,
+} from "drizzle-orm/sqlite-core";
+import { bytesToHex, hexToBytes } from "viem";
+
+export class PgHexBuilder extends PgColumnBuilder {
+ static readonly [entityKind]: string = "PgHexBuilder";
+
+ constructor(columnName: string) {
+ super(columnName, "buffer", "PgHex");
+ }
+
+ build(table: AnyPgTable) {
+ return new PgHex(table, this.config);
+ }
+}
+
+export class PgHex extends PgColumn {
+ static readonly [entityKind]: string = "PgHex";
+
+ getSQLType(): string {
+ return "bytea";
+ }
+
+ override mapFromDriverValue(value: Buffer) {
+ return bytesToHex(value);
+ }
+
+ override mapToDriverValue(value: `0x${string}`): Buffer {
+ return Buffer.from(hexToBytes(value));
+ }
+}
+
+export class SQLiteHexBuilder extends SQLiteColumnBuilder {
+ static readonly [entityKind]: string = "SQLiteHexBuilder";
+
+ constructor(columnName: string) {
+ super(columnName, "buffer", "SQLiteHex");
+ }
+
+ build(table: AnySQLiteTable) {
+ return new SQLiteHex(table, this.config);
+ }
+}
+
+export class SQLiteHex extends SQLiteColumn {
+ static readonly [entityKind]: string = "SQLiteHex";
+
+ getSQLType(): string {
+ return "blob";
+ }
+
+ override mapFromDriverValue(value: Buffer) {
+ return bytesToHex(value);
+ }
+
+ override mapToDriverValue(value: `0x${string}`): Buffer {
+ return Buffer.from(hexToBytes(value));
+ }
+}
diff --git a/packages/core/src/drizzle/json.ts b/packages/core/src/drizzle/json.ts
new file mode 100644
index 000000000..1503ce56f
--- /dev/null
+++ b/packages/core/src/drizzle/json.ts
@@ -0,0 +1,34 @@
+import { entityKind } from "drizzle-orm";
+import {
+ type AnySQLiteTable,
+ SQLiteColumn,
+ SQLiteColumnBuilder,
+} from "drizzle-orm/sqlite-core";
+
+export class SQLiteJsonBuilder extends SQLiteColumnBuilder {
+ static readonly [entityKind]: string = "SQLiteJsonBuilder";
+
+ constructor(columnName: string) {
+ super(columnName, "json", "SQLiteJson");
+ }
+
+ build(table: AnySQLiteTable) {
+ return new SQLiteJson(table, this.config);
+ }
+}
+
+export class SQLiteJson extends SQLiteColumn {
+ static readonly [entityKind]: string = "SQLiteJson";
+
+ getSQLType(): string {
+ return "jsonb";
+ }
+
+ override mapFromDriverValue(value: string) {
+ return JSON.parse(value);
+ }
+
+ override mapToDriverValue(value: object): string {
+ return JSON.stringify(value);
+ }
+}
diff --git a/packages/core/src/drizzle/list.ts b/packages/core/src/drizzle/list.ts
new file mode 100644
index 000000000..2139c96ce
--- /dev/null
+++ b/packages/core/src/drizzle/list.ts
@@ -0,0 +1,100 @@
+import type { Scalar } from "@/schema/common.js";
+import { entityKind } from "drizzle-orm";
+import {
+ type AnyPgTable,
+ PgColumn,
+ PgColumnBuilder,
+} from "drizzle-orm/pg-core";
+import {
+ type AnySQLiteTable,
+ SQLiteColumn,
+ SQLiteColumnBuilder,
+} from "drizzle-orm/sqlite-core";
+
+export class PgListBuilder extends PgColumnBuilder {
+ static readonly [entityKind]: string = "PgListBuilder";
+ element: Scalar;
+
+ constructor(columnName: string, element: Scalar) {
+ super(columnName, "string", "PgList");
+ this.element = element;
+ }
+
+ build(table: AnyPgTable) {
+ return new PgList(table, this.config, this.element);
+ }
+}
+
+export class PgList extends PgColumn {
+ static readonly [entityKind]: string = "PgList";
+ element: Scalar;
+
+ constructor(
+ table: AnyPgTable,
+ config: PgListBuilder["config"],
+ element: Scalar,
+ ) {
+ super(table, config);
+ this.element = element;
+ }
+
+ getSQLType(): string {
+ return "text";
+ }
+
+ override mapFromDriverValue(value: string) {
+ return this.element === "bigint"
+ ? JSON.parse(value).map(BigInt)
+ : JSON.parse(value);
+ }
+
+ override mapToDriverValue(value: Array): string {
+ return this.element === "bigint"
+ ? JSON.stringify(value.map(String))
+ : JSON.stringify(value);
+ }
+}
+
+export class SQLiteListBuilder extends SQLiteColumnBuilder {
+ static readonly [entityKind]: string = "SQLiteListBuilder";
+ element: Scalar;
+
+ constructor(columnName: string, element: Scalar) {
+ super(columnName, "string", "SQLiteList");
+ this.element = element;
+ }
+
+ build(table: AnySQLiteTable) {
+ return new SQLiteList(table, this.config, this.element);
+ }
+}
+
+export class SQLiteList extends SQLiteColumn {
+ static readonly [entityKind]: string = "SQLiteList";
+ element: Scalar;
+
+ constructor(
+ table: AnySQLiteTable,
+ config: SQLiteListBuilder["config"],
+ element: Scalar,
+ ) {
+ super(table, config);
+ this.element = element;
+ }
+
+ getSQLType(): string {
+ return "text";
+ }
+
+ override mapFromDriverValue(value: string) {
+ return this.element === "bigint"
+ ? JSON.parse(value).map(BigInt)
+ : JSON.parse(value);
+ }
+
+ override mapToDriverValue(value: Array): string {
+ return this.element === "bigint"
+ ? JSON.stringify(value.map(String))
+ : JSON.stringify(value);
+ }
+}
diff --git a/packages/core/src/drizzle/runtime.test.ts b/packages/core/src/drizzle/runtime.test.ts
new file mode 100644
index 000000000..3e8978947
--- /dev/null
+++ b/packages/core/src/drizzle/runtime.test.ts
@@ -0,0 +1,267 @@
+import {
+ setupCommon,
+ setupDatabaseServices,
+ setupIsolatedDatabase,
+} from "@/_test/setup.js";
+import type { Context } from "@/hono/context.js";
+import type { HistoricalStore } from "@/indexing-store/store.js";
+import { createSchema } from "@/schema/schema.js";
+import { eq } from "drizzle-orm";
+import { beforeEach, expect, test } from "vitest";
+import type { DrizzleDb } from "./db.js";
+import { createDrizzleDb, createDrizzleTables } from "./runtime.js";
+
+beforeEach(setupCommon);
+beforeEach(setupIsolatedDatabase);
+
+test("runtime select", async (context) => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({ tableName: "table", id: "kyle" });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: "kyle" });
+
+ await cleanup();
+});
+
+test("select hex", async (context) => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.hex(),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({ tableName: "table", id: "0x1" });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: "0x01" });
+
+ await cleanup();
+});
+
+test("select bigint", async (context) => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.bigint(),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({ tableName: "table", id: 1n });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: 1n });
+
+ await cleanup();
+});
+
+test("select json", async (context) => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ json: p.json(),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({
+ tableName: "table",
+ id: "1",
+ data: {
+ json: {
+ prop: 52,
+ },
+ },
+ });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: "1", json: { prop: 52 } });
+
+ await cleanup();
+});
+
+test("select enum", async (context) => {
+ const schema = createSchema((p) => ({
+ en: p.createEnum(["hi", "low"]),
+ table: p.createTable({
+ id: p.string(),
+ en: p.enum("en"),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({
+ tableName: "table",
+ id: "1",
+ data: { en: "hi" },
+ });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: "1", en: "hi" });
+
+ await cleanup();
+});
+
+test("select list", async (context) => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ list: p.string().list(),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({
+ tableName: "table",
+ id: "1",
+ data: {
+ list: ["big", "dog"],
+ },
+ });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db.select().from(drizzleTables.table);
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({ id: "1", list: ["big", "dog"] });
+
+ await cleanup();
+});
+
+test("select with join", async (context) => {
+ const schema = createSchema((p) => ({
+ account: p.createTable({
+ id: p.hex(),
+ name: p.string(),
+ age: p.int(),
+ }),
+ nft: p.createTable({
+ id: p.bigint(),
+ owner: p.hex().references("account.id"),
+ }),
+ }));
+
+ const { database, cleanup, indexingStore, namespaceInfo } =
+ await setupDatabaseServices(context, { schema });
+
+ await indexingStore.create({
+ tableName: "account",
+ id: "0x1",
+ data: {
+ name: "kyle",
+ age: 52,
+ },
+ });
+ await indexingStore.create({
+ tableName: "nft",
+ id: 10n,
+ data: { owner: "0x1" },
+ });
+ await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+ const db = createDrizzleDb(database) as unknown as DrizzleDb;
+
+ const drizzleTables = createDrizzleTables(
+ schema,
+ database,
+ namespaceInfo.userNamespace,
+ ) as Context["tables"];
+
+ const rows = await db
+ .select()
+ .from(drizzleTables.account)
+ .fullJoin(
+ drizzleTables.nft,
+ eq(drizzleTables.account.id, drizzleTables.nft.owner),
+ );
+
+ expect(rows).toHaveLength(1);
+ expect(rows[0]).toMatchObject({
+ account: { id: "0x01", name: "kyle", age: 52 },
+ nft: { id: 10n, owner: "0x01" },
+ });
+
+ await cleanup();
+});
diff --git a/packages/core/src/drizzle/runtime.ts b/packages/core/src/drizzle/runtime.ts
new file mode 100644
index 000000000..33369d35a
--- /dev/null
+++ b/packages/core/src/drizzle/runtime.ts
@@ -0,0 +1,256 @@
+import type { DatabaseService } from "@/database/service.js";
+import type { Scalar, Schema } from "@/schema/common.js";
+import {
+ isEnumColumn,
+ isJSONColumn,
+ isListColumn,
+ isMaterialColumn,
+ isOptionalColumn,
+ isReferenceColumn,
+ isScalarColumn,
+} from "@/schema/utils.js";
+import { getTables } from "@/schema/utils.js";
+import { drizzle as drizzleSQLite } from "drizzle-orm/better-sqlite3";
+import { drizzle as drizzlePg } from "drizzle-orm/node-postgres";
+import { pgSchema, pgTable } from "drizzle-orm/pg-core";
+import {
+ doublePrecision as PgDoublePrecision,
+ integer as PgInteger,
+ jsonb as PgJsonb,
+ numeric as PgNumeric,
+ text as PgText,
+} from "drizzle-orm/pg-core";
+import {
+ integer as SQLiteInteger,
+ real as SQLiteReal,
+ text as SQLiteText,
+ sqliteTable,
+} from "drizzle-orm/sqlite-core";
+import { SQLiteBigintBuilder } from "./bigint.js";
+import { PgHexBuilder, SQLiteHexBuilder } from "./hex.js";
+import { SQLiteJsonBuilder } from "./json.js";
+import { PgListBuilder, SQLiteListBuilder } from "./list.js";
+
+export const createDrizzleDb = (database: DatabaseService) => {
+ if (database.kind === "postgres") {
+ const drizzle = drizzlePg(database.readonlyPool);
+ return {
+ // @ts-ignore
+ select: (...args: any[]) => drizzle.select(...args),
+ execute: (query: any) => drizzle.execute(query),
+ };
+ } else {
+ const drizzle = drizzleSQLite(database.readonlyDatabase);
+ return {
+ // @ts-ignore
+ select: (...args: any[]) => drizzle.select(...args),
+ execute: (query: any) => {
+ try {
+ try {
+ return drizzle.all(query);
+ } catch (e) {
+ const error = e as Error;
+ if (
+ error.name === "SqliteError" &&
+ error.message ===
+ "This statement does not return data. Use run() instead"
+ ) {
+ return drizzle.run(query);
+ } else {
+ throw error;
+ }
+ }
+ } catch (e) {
+ const error = e as Error;
+ if (error.cause) throw error.cause;
+ throw error;
+ }
+ },
+ };
+ }
+};
+
+type SQLiteTable = Parameters[1];
+type PostgresTable = Parameters[1];
+type DrizzleTable = { [tableName: string]: any };
+
+export const createDrizzleTables = (
+ schema: Schema,
+ database: DatabaseService,
+ dbNamespace: string,
+) => {
+ const drizzleTables: { [tableName: string]: DrizzleTable } = {};
+
+ for (const [tableName, { table }] of Object.entries(getTables(schema))) {
+ const drizzleColumns: DrizzleTable = {};
+
+ for (const [columnName, column] of Object.entries(table)) {
+ if (isMaterialColumn(column)) {
+ if (isJSONColumn(column)) {
+ drizzleColumns[columnName] = convertJsonColumn(
+ columnName,
+ database.kind,
+ );
+ } else if (isEnumColumn(column)) {
+ if (isListColumn(column)) {
+ drizzleColumns[columnName] = convertListColumn(
+ columnName,
+ database.kind,
+ "string",
+ );
+ } else {
+ drizzleColumns[columnName] = convertEnumColumn(
+ columnName,
+ database.kind,
+ );
+ }
+ } else if (isScalarColumn(column) || isReferenceColumn(column)) {
+ if (isListColumn(column)) {
+ drizzleColumns[columnName] = convertListColumn(
+ columnName,
+ database.kind,
+ column[" scalar"],
+ );
+ } else {
+ switch (column[" scalar"]) {
+ case "string":
+ drizzleColumns[columnName] = convertStringColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+
+ case "int":
+ drizzleColumns[columnName] = convertIntColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+
+ case "boolean":
+ drizzleColumns[columnName] = convertBooleanColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+
+ case "float":
+ drizzleColumns[columnName] = convertFloatColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+
+ case "hex":
+ drizzleColumns[columnName] = convertHexColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+
+ case "bigint":
+ drizzleColumns[columnName] = convertBigintColumn(
+ columnName,
+ database.kind,
+ );
+ break;
+ }
+ }
+
+ // apply column constraints
+ if (columnName === "id") {
+ drizzleColumns[columnName] =
+ drizzleColumns[columnName]!.primaryKey();
+ } else if (isOptionalColumn(column) === false) {
+ drizzleColumns[columnName] = drizzleColumns[columnName]!.notNull();
+ }
+ }
+ }
+ }
+
+ if (database.kind === "postgres") {
+ // Note: this is to avoid an error thrown by drizzle when
+ // setting schema to "public".
+ if (dbNamespace === "public") {
+ drizzleTables[tableName] = pgTable(
+ tableName,
+ drizzleColumns as PostgresTable,
+ );
+ } else {
+ drizzleTables[tableName] = pgSchema(dbNamespace).table(
+ tableName,
+ drizzleColumns as PostgresTable,
+ );
+ }
+ } else {
+ drizzleTables[tableName] = sqliteTable(
+ tableName,
+ drizzleColumns as SQLiteTable,
+ );
+ }
+ }
+
+ return drizzleTables;
+};
+
+const convertStringColumn = (
+ columnName: string,
+ kind: "sqlite" | "postgres",
+) => {
+ return kind === "sqlite" ? SQLiteText(columnName) : PgText(columnName);
+};
+
+const convertIntColumn = (columnName: string, kind: "sqlite" | "postgres") => {
+ return kind === "sqlite" ? SQLiteInteger(columnName) : PgInteger(columnName);
+};
+
+const convertFloatColumn = (
+ columnName: string,
+ kind: "sqlite" | "postgres",
+) => {
+ return kind === "sqlite"
+ ? SQLiteReal(columnName)
+ : PgDoublePrecision(columnName);
+};
+
+const convertBooleanColumn = (
+ columnName: string,
+ kind: "sqlite" | "postgres",
+) => {
+ return kind === "sqlite" ? SQLiteInteger(columnName) : PgInteger(columnName);
+};
+
+const convertHexColumn = (columnName: string, kind: "sqlite" | "postgres") => {
+ return kind === "sqlite"
+ ? new SQLiteHexBuilder(columnName)
+ : new PgHexBuilder(columnName);
+};
+
+const convertBigintColumn = (
+ columnName: string,
+ kind: "sqlite" | "postgres",
+) => {
+ return kind === "sqlite"
+ ? new SQLiteBigintBuilder(columnName)
+ : PgNumeric(columnName, { precision: 78 });
+};
+
+const convertListColumn = (
+ columnName: string,
+ kind: "sqlite" | "postgres",
+ element: Scalar,
+) => {
+ return kind === "sqlite"
+ ? new SQLiteListBuilder(columnName, element)
+ : new PgListBuilder(columnName, element);
+};
+
+const convertJsonColumn = (columnName: string, kind: "sqlite" | "postgres") => {
+ return kind === "sqlite"
+ ? new SQLiteJsonBuilder(columnName)
+ : PgJsonb(columnName);
+};
+
+const convertEnumColumn = (columnName: string, kind: "sqlite" | "postgres") => {
+ return kind === "sqlite" ? SQLiteText(columnName) : PgText(columnName);
+};
diff --git a/packages/core/src/drizzle/select.ts b/packages/core/src/drizzle/select.ts
new file mode 100644
index 000000000..dc6a592d2
--- /dev/null
+++ b/packages/core/src/drizzle/select.ts
@@ -0,0 +1,709 @@
+import type {
+ Assume,
+ Column,
+ MakeColumnConfig,
+ QueryPromise,
+ SelectedFields,
+ SelectedFieldsOrdered,
+ Subquery,
+ Table,
+ TableConfig,
+ UpdateTableConfig,
+ ValidateShape,
+ entityKind,
+} from "drizzle-orm";
+import { TypedQueryBuilder } from "drizzle-orm/query-builders/query-builder";
+import type {
+ AddAliasToSelection,
+ AppendToNullabilityMap,
+ AppendToResult,
+ BuildSubquerySelection,
+ GetSelectTableName,
+ GetSelectTableSelection,
+ JoinNullability,
+ JoinType,
+ SelectMode,
+ SelectResult,
+ SetOperator,
+} from "drizzle-orm/query-builders/select.types";
+import type {
+ ColumnsSelection,
+ Placeholder,
+ Query,
+ SQL,
+ View,
+} from "drizzle-orm/sql";
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L54
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L50
+ */
+export type SelectBuilder<
+ TSelection extends SelectedFields | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TBuilderMode extends "db" | "qb" = "db",
+> = {
+ from: (
+ source: TFrom,
+ ) => CreateSelectFromBuilderMode<
+ TBuilderMode,
+ GetSelectTableName,
+ TResultType,
+ TRunResult,
+ TSelection extends undefined ? GetSelectTableSelection : TSelection,
+ TSelection extends undefined ? "single" : "partial"
+ >;
+};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L126
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L130
+ */
+export abstract class SelectQueryBuilderBase<
+ THKT extends SelectHKTBase,
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TSelectMode extends SelectMode,
+ TNullabilityMap extends Record<
+ string,
+ JoinNullability
+ > = TTableName extends string ? Record : {},
+ TDynamic extends boolean = false,
+ TExcludedMethods extends string = never,
+ TResult extends any[] = SelectResult<
+ TSelection,
+ TSelectMode,
+ TNullabilityMap
+ >[],
+ TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
+ TSelection,
+ TNullabilityMap
+ >,
+> extends TypedQueryBuilder {
+ declare [entityKind]: string;
+ declare _: {
+ readonly hkt: THKT;
+ readonly tableName: TTableName;
+ readonly resultType: TResultType;
+ readonly runResult: TRunResult;
+ readonly selection: TSelection;
+ readonly selectMode: TSelectMode;
+ readonly nullabilityMap: TNullabilityMap;
+ readonly dynamic: TDynamic;
+ readonly excludedMethods: TExcludedMethods;
+ readonly result: TResult;
+ readonly selectedFields: TSelectedFields;
+ };
+
+ declare leftJoin: JoinFn;
+ declare rightJoin: JoinFn;
+ declare innerJoin: JoinFn;
+ declare fullJoin: JoinFn;
+
+ private declare setOperator: >(
+ rightSelection:
+ | ((
+ setOperators: GetSetOperators,
+ ) => SetOperatorRightSelect)
+ | SetOperatorRightSelect,
+ ) => SelectWithout;
+
+ declare union: typeof this.setOperator;
+ declare unionAll: typeof this.setOperator;
+ declare intersect: typeof this.setOperator;
+ declare intersectAll: typeof this.setOperator;
+ declare except: typeof this.setOperator;
+ declare exceptAll: typeof this.setOperator;
+
+ declare where: (
+ where: ((aliases: TSelection) => SQL | undefined) | SQL | undefined,
+ ) => SelectWithout;
+
+ declare having: (
+ having:
+ | ((aliases: this["_"]["selection"]) => SQL | undefined)
+ | SQL
+ | undefined,
+ ) => SelectWithout;
+
+ declare groupBy: (
+ ...columns: (Column | SQL)[]
+ ) => SelectWithout;
+
+ declare orderBy: (
+ ...columns: (Column | SQL)[]
+ ) => SelectWithout;
+
+ declare limit: (
+ limit: number | Placeholder,
+ ) => SelectWithout;
+
+ declare offset: (
+ offset: number | Placeholder,
+ ) => SelectWithout;
+
+ declare toSQL: () => Query;
+
+ declare as: (
+ alias: TAlias,
+ ) => SubqueryWithSelection;
+
+ declare $dynamic: () => SelectDynamic;
+}
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L803
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L903
+ */
+export type SelectBase<
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TSelectMode extends SelectMode = "single",
+ TNullabilityMap extends Record<
+ string,
+ JoinNullability
+ > = TTableName extends string ? Record : {},
+ TDynamic extends boolean = false,
+ TExcludedMethods extends string = never,
+ TResult = SelectResult[],
+ TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
+ TSelection,
+ TNullabilityMap
+ >,
+> = SelectQueryBuilderBase<
+ SelectHKT,
+ TTableName,
+ TResultType,
+ TRunResult,
+ TSelection,
+ TSelectMode,
+ TNullabilityMap,
+ TDynamic,
+ TExcludedMethods,
+ // @ts-ignore
+ TResult,
+ TSelectedFields
+> &
+ QueryPromise;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L31
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30
+ */
+export type SelectJoinConfig = {
+ on: SQL;
+ table: Table | Subquery | View | SQL;
+ alias: string | undefined;
+ joinType: JoinType;
+};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L38
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30
+ */
+export type BuildAliasTable<
+ tableOrView extends Table | View,
+ alias extends string,
+> = tableOrView extends Table
+ ? TableWithColumns<
+ UpdateTableConfig<
+ tableOrView["_"]["config"],
+ {
+ name: alias;
+ columns: MapColumnsToTableAlias;
+ }
+ >
+ >
+ : tableOrView extends View
+ ? ViewWithSelection<
+ alias,
+ tableOrView["_"]["existing"],
+ MapColumnsToTableAlias
+ >
+ : never;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L52
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L52
+ */
+export type SelectConfig = {
+ withList?: Subquery[];
+ fields: Record;
+ fieldsFlat?: SelectedFieldsOrdered;
+ where?: SQL;
+ having?: SQL;
+ table: Table | Subquery | View | SQL;
+ limit?: number | Placeholder;
+ offset?: number | Placeholder;
+ joins?: SelectJoinConfig[];
+ orderBy?: (Column | SQL | SQL.Aliased)[];
+ groupBy?: (Column | SQL | SQL.Aliased)[];
+ distinct?: boolean;
+ setOperators: {
+ rightSelect: TypedQueryBuilder<any, any[]>;
+ type: SetOperator;
+ isAll: boolean;
+ orderBy?: (Column | SQL | SQL.Aliased)[];
+ limit?: number | Placeholder;
+ offset?: number | Placeholder;
+ }[];
+};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L75
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L82
+ */
+export type Join<
+ T extends AnySelectQueryBuilder,
+ TDynamic extends boolean,
+ TJoinType extends JoinType,
+ TJoinedTable extends Table | Subquery | View | SQL,
+ TJoinedName extends
+ GetSelectTableName<TJoinedTable> = GetSelectTableName<TJoinedTable>,
+> = T extends any
+ ? SelectWithout<
+ SelectKind<
+ T["_"]["hkt"],
+ T["_"]["tableName"],
+ T["_"]["resultType"],
+ T["_"]["runResult"],
+ AppendToResult<
+ T["_"]["tableName"],
+ T["_"]["selection"],
+ TJoinedName,
+ TJoinedTable extends Table
+ ? TJoinedTable["_"]["columns"]
+ : TJoinedTable extends Subquery | View
+ ? Assume<
+ TJoinedTable["_"]["selectedFields"],
+ SelectedFields
+ >
+ : never,
+ T["_"]["selectMode"]
+ >,
+ T["_"]["selectMode"] extends "partial"
+ ? T["_"]["selectMode"]
+ : "multiple",
+ AppendToNullabilityMap<
+ T["_"]["nullabilityMap"],
+ TJoinedName,
+ TJoinType
+ >,
+ T["_"]["dynamic"],
+ T["_"]["excludedMethods"]
+ >,
+ TDynamic,
+ T["_"]["excludedMethods"]
+ >
+ : never;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L106
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L111
+ */
+export type JoinFn<
+ T extends AnySelectQueryBuilder,
+ TDynamic extends boolean,
+ TJoinType extends JoinType,
+> = <
+ TJoinedTable extends Table | Subquery | View | SQL,
+ TJoinedName extends
+ GetSelectTableName<TJoinedTable> = GetSelectTableName<TJoinedTable>,
+>(
+ table: TJoinedTable,
+ on: ((aliases: T["_"]["selection"]) => SQL | undefined) | SQL | undefined,
+) => Join<T, TDynamic, TJoinType, TJoinedTable, TJoinedName>;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/query-builders/select.types.ts#L75
+ */
+type MapColumnsToTableAlias<
+ TColumns extends ColumnsSelection,
+ TAlias extends string,
+> = {
+ [Key in keyof TColumns]: TColumns[Key] extends Column
+ ? Column["_"], TAlias>>
+ : TColumns[Key];
+} & {};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L124
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L146
+ */
+export type SelectHKTBase = {
+ tableName: string | undefined;
+ resultType: "sync" | "async";
+ runResult: unknown;
+ selection: unknown;
+ selectMode: SelectMode;
+ nullabilityMap: unknown;
+ dynamic: boolean;
+ excludedMethods: string;
+ result: unknown;
+ selectedFields: unknown;
+ _type: unknown;
+};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L138
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L158
+ */
+export type SelectKind<
+ T extends SelectHKTBase,
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TSelectMode extends SelectMode,
+ TNullabilityMap extends Record<string, JoinNullability>,
+ TDynamic extends boolean,
+ TExcludedMethods extends string,
+ TResult = SelectResult<TSelection, TSelectMode, TNullabilityMap>[],
+ TSelectedFields = BuildSubquerySelection<TSelection, TNullabilityMap>,
+> = (T & {
+ tableName: TTableName;
+ resultType: TResultType;
+ runResult: TRunResult;
+ selection: TSelection;
+ selectMode: TSelectMode;
+ nullabilityMap: TNullabilityMap;
+ dynamic: TDynamic;
+ excludedMethods: TExcludedMethods;
+ result: TResult;
+ selectedFields: TSelectedFields;
+})["_type"];
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L163
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L179
+ */
+export interface SelectQueryBuilderHKT extends SelectHKTBase {
+ _type: SelectQueryBuilderBase<
+ SelectQueryBuilderHKT,
+ this["tableName"],
+ this["resultType"],
+ this["runResult"],
+ Assume<this["selection"], ColumnsSelection>,
+ this["selectMode"],
+ Assume<this["nullabilityMap"], Record<string, JoinNullability>>,
+ this["dynamic"],
+ this["excludedMethods"],
+ Assume<this["result"], any[]>,
+ Assume<this["selectedFields"], ColumnsSelection>
+ >;
+}
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L179
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L193
+ */
+export interface SelectHKT extends SelectHKTBase {
+ _type: SelectBase<
+ this["tableName"],
+ this["resultType"],
+ this["runResult"],
+ Assume<this["selection"], ColumnsSelection>,
+ this["selectMode"],
+ Assume<this["nullabilityMap"], Record<string, JoinNullability>>,
+ this["dynamic"],
+ this["excludedMethods"],
+ Assume<this["result"], any[]>,
+ Assume<this["selectedFields"], ColumnsSelection>
+ >;
+}
+
+export type SetOperatorExcludedMethods =
+ | "leftJoin"
+ | "rightJoin"
+ | "innerJoin"
+ | "fullJoin"
+ | "where"
+ | "having"
+ | "groupBy";
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L204
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L206
+ */
+export type CreateSelectFromBuilderMode<
+ TBuilderMode extends "db" | "qb",
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TSelectMode extends SelectMode,
+> = TBuilderMode extends "db"
+ ? SelectBase<TTableName, TResultType, TRunResult, TSelection, TSelectMode>
+ : SelectQueryBuilderBase<
+ SelectQueryBuilderHKT,
+ TTableName,
+ TResultType,
+ TRunResult,
+ TSelection,
+ TSelectMode
+ >;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L227
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L224
+ */
+type SelectWithout<
+ T extends AnySelectQueryBuilder,
+ TDynamic extends boolean,
+ K extends keyof T & string,
+ TResetExcluded extends boolean = false,
+> = TDynamic extends true
+ ? T
+ : Omit<
+ SelectKind<
+ T["_"]["hkt"],
+ T["_"]["tableName"],
+ T["_"]["resultType"],
+ T["_"]["runResult"],
+ T["_"]["selection"],
+ T["_"]["selectMode"],
+ T["_"]["nullabilityMap"],
+ TDynamic,
+ TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K,
+ T["_"]["result"],
+ T["_"]["selectedFields"]
+ >,
+ TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K
+ >;
+
+export type SelectDynamic<T extends AnySelectQueryBuilder> = SelectKind<
+ T["_"]["hkt"],
+ T["_"]["tableName"],
+ T["_"]["resultType"],
+ T["_"]["runResult"],
+ T["_"]["selection"],
+ T["_"]["selectMode"],
+ T["_"]["nullabilityMap"],
+ true,
+ never,
+ T["_"]["result"],
+ T["_"]["selectedFields"]
+>;
+
+export type AnySelectQueryBuilder = SelectQueryBuilderBase<
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any
+>;
+
+export type AnySetOperatorInterface = SetOperatorInterface<
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any
+>;
+
+export interface SetOperatorInterface<
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TSelectMode extends SelectMode = "single",
+ TNullabilityMap extends Record<
+ string,
+ JoinNullability
+ > = TTableName extends string ? Record<TTableName, "not-null"> : {},
+ TDynamic extends boolean = false,
+ TExcludedMethods extends string = never,
+ TResult extends any[] = SelectResult<
+ TSelection,
+ TSelectMode,
+ TNullabilityMap
+ >[],
+ TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
+ TSelection,
+ TNullabilityMap
+ >,
+> {
+ _: {
+ readonly hkt: SelectHKTBase;
+ readonly tableName: TTableName;
+ readonly resultType: TResultType;
+ readonly runResult: TRunResult;
+ readonly selection: TSelection;
+ readonly selectMode: TSelectMode;
+ readonly nullabilityMap: TNullabilityMap;
+ readonly dynamic: TDynamic;
+ readonly excludedMethods: TExcludedMethods;
+ readonly result: TResult;
+ readonly selectedFields: TSelectedFields;
+ };
+}
+
+export type SetOperatorWithResult<TResult extends any[]> = SetOperatorInterface<
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ TResult,
+ any
+>;
+
+export type SetOperatorRightSelect<
+ TValue extends SetOperatorWithResult<TResult>,
+ TResult extends any[],
+> = TValue extends SetOperatorInterface<
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ infer TValueResult,
+ any
+>
+ ? ValidateShape<
+ TValueResult[number],
+ TResult[number],
+ TypedQueryBuilder<any, TValueResult>
+ >
+ : TValue;
+
+export type SetOperatorRestSelect<
+ TValue extends readonly SetOperatorWithResult<TResult>[],
+ TResult extends any[],
+> = TValue extends [infer First, ...infer Rest]
+ ? First extends SetOperatorInterface<
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ any,
+ infer TValueResult,
+ any
+ >
+ ? Rest extends AnySetOperatorInterface[]
+ ? [
+ ValidateShape<
+ TValueResult[number],
+ TResult[number],
+ TypedQueryBuilder<any, TValueResult>
+ >,
+ ...SetOperatorRestSelect<Rest, TResult>,
+ ]
+ : ValidateShape<
+ TValueResult[number],
+ TResult[number],
+ TypedQueryBuilder<any, TValueResult>[]
+ >
+ : never
+ : TValue;
+
+export type CreateSetOperatorFn = <
+ TTableName extends string | undefined,
+ TResultType extends "sync" | "async",
+ TRunResult,
+ TSelection extends ColumnsSelection,
+ TValue extends SetOperatorWithResult<TResult>,
+ TRest extends SetOperatorWithResult<TResult>[],
+ TSelectMode extends SelectMode = "single",
+ TNullabilityMap extends Record<
+ string,
+ JoinNullability
+ > = TTableName extends string ? Record<TTableName, "not-null"> : {},
+ TDynamic extends boolean = false,
+ TExcludedMethods extends string = never,
+ TResult extends any[] = SelectResult<
+ TSelection,
+ TSelectMode,
+ TNullabilityMap
+ >[],
+ TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
+ TSelection,
+ TNullabilityMap
+ >,
+>(
+ leftSelect: SetOperatorInterface<
+ TTableName,
+ TResultType,
+ TRunResult,
+ TSelection,
+ TSelectMode,
+ TNullabilityMap,
+ TDynamic,
+ TExcludedMethods,
+ TResult,
+ TSelectedFields
+ >,
+ rightSelect: SetOperatorRightSelect<TValue, TResult>,
+ ...restSelects: SetOperatorRestSelect<TRest, TResult>
+) => SelectWithout<
+ SelectBase<
+ TTableName,
+ TResultType,
+ TRunResult,
+ TSelection,
+ TSelectMode,
+ TNullabilityMap,
+ TDynamic,
+ TExcludedMethods,
+ TResult,
+ TSelectedFields
+ >,
+ false,
+ SetOperatorExcludedMethods,
+ true
+>;
+
+export type GetSetOperators = {
+ union: CreateSetOperatorFn;
+ intersect: CreateSetOperatorFn;
+ except: CreateSetOperatorFn;
+ unionAll: CreateSetOperatorFn;
+};
+
+export type SubqueryWithSelection<
+ TSelection extends ColumnsSelection,
+ TAlias extends string,
+> = Subquery<TAlias, AddAliasToSelection<TSelection, TAlias>> &
+ AddAliasToSelection<TSelection, TAlias>;
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/table.ts#L49
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/table.ts#L43
+ */
+export type TableWithColumns<T extends TableConfig> = Table<T> & {
+ [key in keyof T["columns"]]: T["columns"][key];
+};
+
+/**
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/view.ts#L154
+ * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/view.ts#L305
+ */
+export type ViewWithSelection<
+ TName extends string,
+ TExisting extends boolean,
+ TSelection extends ColumnsSelection,
+> = View<TName, TExisting, TSelection> & TSelection;
diff --git a/packages/core/src/drizzle/table.test-d.ts b/packages/core/src/drizzle/table.test-d.ts
new file mode 100644
index 000000000..7c57393be
--- /dev/null
+++ b/packages/core/src/drizzle/table.test-d.ts
@@ -0,0 +1,152 @@
+import { createSchema } from "@/index.js";
+import { eq } from "drizzle-orm";
+import type { Hex } from "viem";
+import { expectTypeOf, test } from "vitest";
+import type { DrizzleDb } from "./db.js";
+import type { DrizzleTable } from "./table.js";
+
+test("select query promise", async () => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ name: p.int().optional(),
+ }),
+ }));
+
+ const table = {} as DrizzleTable<
+ "table",
+ (typeof schema)["table"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb).select({ id: table.id }).from(table);
+ // ^?
+
+ expectTypeOf<{ id: string }[]>(result);
+});
+
+test("select optional column", async () => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ name: p.int().optional(),
+ }),
+ }));
+
+ const table = {} as DrizzleTable<
+ "table",
+ (typeof schema)["table"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb).select().from(table);
+ // ^?
+
+ expectTypeOf<{ id: string; name: number | null }[]>(result);
+});
+
+test("select enum", async () => {
+ const schema = createSchema((p) => ({
+ e: p.createEnum(["yes", "no"]),
+ table: p.createTable({
+ id: p.string(),
+ e: p.enum("e"),
+ }),
+ }));
+
+ const table = {} as DrizzleTable<
+ "table",
+ (typeof schema)["table"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb).select().from(table);
+ // ^?
+
+ expectTypeOf<{ id: string; e: "yes" | "no" }[]>(result);
+});
+
+test("select json", async () => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ json: p.json<{ a: number; b: string }>(),
+ }),
+ }));
+
+ const table = {} as DrizzleTable<
+ "table",
+ (typeof schema)["table"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb).select().from(table);
+ // ^?
+
+ expectTypeOf<{ id: string; json: { a: number; b: string } }[]>(result);
+});
+
+test("select list", async () => {
+ const schema = createSchema((p) => ({
+ table: p.createTable({
+ id: p.string(),
+ list: p.string().list(),
+ }),
+ }));
+
+ const table = {} as DrizzleTable<
+ "table",
+ (typeof schema)["table"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb).select().from(table);
+ // ^?
+
+ expectTypeOf<{ id: string; list: string[] }[]>(result);
+});
+
+test("select join", async () => {
+ const schema = createSchema((p) => ({
+ account: p.createTable({
+ id: p.hex(),
+ name: p.string(),
+ age: p.int(),
+ }),
+ nft: p.createTable({
+ id: p.bigint(),
+ owner: p.hex().references("account.id"),
+ }),
+ }));
+
+ const account = {} as DrizzleTable<
+ "account",
+ (typeof schema)["account"]["table"],
+ typeof schema
+ >;
+ const nft = {} as DrizzleTable<
+ "nft",
+ (typeof schema)["nft"]["table"],
+ typeof schema
+ >;
+
+ const result = await ({} as DrizzleDb)
+ // ^?
+ .select()
+ .from(account)
+ .fullJoin(nft, eq(account.id, nft.owner));
+
+ expectTypeOf<
+ {
+ account: {
+ id: Hex;
+ name: string;
+ age: number;
+ } | null;
+ nft: {
+ id: bigint;
+ owner: Hex;
+ } | null;
+ }[]
+ >(result);
+});
diff --git a/packages/core/src/drizzle/table.ts b/packages/core/src/drizzle/table.ts
new file mode 100644
index 000000000..6fe78fc6a
--- /dev/null
+++ b/packages/core/src/drizzle/table.ts
@@ -0,0 +1,51 @@
+import type {
+ EnumColumn,
+ ExtractNonVirtualColumnNames,
+ JSONColumn,
+ Schema as PonderSchema,
+ Table as PonderTable,
+ ReferenceColumn,
+ ScalarColumn,
+} from "@/schema/common.js";
+import type { InferColumnType } from "@/schema/infer.js";
+import type { BuildColumns, ColumnBuilderBase } from "drizzle-orm";
+import type { TableWithColumns } from "./select.js";
+
+/**
+ * Performs type transformation between Ponder and Drizzle column representation.
+ *
+ * @returns TableWithColumns
+ */
+export type DrizzleTable<
+ tableName extends string,
+ table extends PonderTable,
+ schema extends PonderSchema,
+> = TableWithColumns<{
+ name: tableName;
+ schema: undefined;
+ columns: BuildColumns<
+ tableName,
+ {
+ [columnName in ExtractNonVirtualColumnNames<table>]: ColumnBuilderBase<{
+ name: columnName & string;
+ dataType: "custom";
+ columnType: "ponder";
+ data: InferColumnType<table[columnName], schema>;
+ driverParam: unknown;
+ enumValues: undefined;
+ notNull: (table[columnName] &
+ (
+ | ScalarColumn
+ | ReferenceColumn
+ | EnumColumn
+ | JSONColumn
+ ))[" optional"] extends true
+ ? false
+ : true;
+ primaryKey: columnName extends "id" ? true : false;
+ }>;
+ },
+ "common"
+ >;
+ dialect: "common";
+}>;
diff --git a/packages/core/src/server/graphql/buildGraphqlSchema.test.ts b/packages/core/src/graphql/buildGraphqlSchema.test.ts
similarity index 95%
rename from packages/core/src/server/graphql/buildGraphqlSchema.test.ts
rename to packages/core/src/graphql/buildGraphqlSchema.test.ts
index 51ae22ca0..68dc6becd 100644
--- a/packages/core/src/server/graphql/buildGraphqlSchema.test.ts
+++ b/packages/core/src/graphql/buildGraphqlSchema.test.ts
@@ -9,7 +9,7 @@ import { createSchema } from "@/schema/schema.js";
import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
import { type GraphQLType, execute, parse } from "graphql";
import { beforeEach, expect, test } from "vitest";
-import { buildGraphqlSchema } from "./buildGraphqlSchema.js";
+import { buildGraphQLSchema } from "./buildGraphqlSchema.js";
import { buildLoaderCache } from "./buildLoaderCache.js";
beforeEach(setupCommon);
@@ -50,7 +50,7 @@ test("scalar", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -118,7 +118,7 @@ test("scalar list", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -186,7 +186,7 @@ test("scalar optional", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -254,7 +254,7 @@ test("scalar optional list", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -312,7 +312,7 @@ test("json", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -361,7 +361,7 @@ test("enum", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -410,7 +410,7 @@ test("enum optional", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -468,7 +468,7 @@ test("enum list", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -526,7 +526,7 @@ test("enum optional list", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -580,7 +580,7 @@ test("one", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -651,7 +651,7 @@ test("many", async (context) => {
id: "0",
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -732,7 +732,7 @@ test("many w/ filter", async (context) => {
id: "0",
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -787,7 +787,7 @@ test("bigint id", async (context) => {
id: 0n,
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -829,7 +829,7 @@ test("hex id", async (context) => {
id: "0x00",
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -873,7 +873,7 @@ test("filter string eq", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -935,7 +935,7 @@ test("filter string in", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1009,7 +1009,7 @@ test("filter string contains", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1083,7 +1083,7 @@ test("filter string starts with", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1157,7 +1157,7 @@ test("filter string not ends with", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1219,7 +1219,7 @@ test("filter int eq", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1293,7 +1293,7 @@ test("filter int gt", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1355,7 +1355,7 @@ test("filter int lte", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1417,7 +1417,7 @@ test("filter int in", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1479,7 +1479,7 @@ test("filter float eq", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1553,7 +1553,7 @@ test("filter float gt", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1615,7 +1615,7 @@ test("filter float lte", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1677,7 +1677,7 @@ test("filter float in", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1739,7 +1739,7 @@ test("filter bigint eq", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1813,7 +1813,7 @@ test("filter bigint gt", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1875,7 +1875,7 @@ test("filter bigint lte", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1937,7 +1937,7 @@ test("filter bigint in", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -1999,7 +1999,7 @@ test("filer hex eq", async (context) => {
await create("0", indexingStore);
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2073,7 +2073,7 @@ test("filter hex gt", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2147,7 +2147,7 @@ test("filter string list eq", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2221,7 +2221,7 @@ test("filter string list has", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2286,7 +2286,7 @@ test("filter enum eq", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2341,7 +2341,7 @@ test("filter enum in", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2401,7 +2401,7 @@ test("filter ref eq", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2468,7 +2468,7 @@ test("filter ref in", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2554,7 +2554,7 @@ test("order int asc", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2638,7 +2638,7 @@ test("order bigint asc", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2722,7 +2722,7 @@ test("order bigint desc", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2780,7 +2780,7 @@ test("limit default", async (context) => {
await create(String(i), indexingStore);
}
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2831,7 +2831,7 @@ test("limit", async (context) => {
await create(String(i), indexingStore);
}
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2882,7 +2882,7 @@ test("limit error", async (context) => {
// await create(String(i), indexingStore);
// }
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
@@ -2936,7 +2936,7 @@ test("filter type has correct suffixes and types", () => {
}),
}));
- const serverSchema = buildGraphqlSchema(s);
+ const serverSchema = buildGraphQLSchema(s);
const typeMap = serverSchema.getTypeMap();
@@ -3048,7 +3048,7 @@ test("metadata", async (context) => {
const metadataStore = getMetadataStore({
encoding: database.kind,
- db: database.readonlyDb,
+ db: database.indexingDb,
namespaceInfo,
});
@@ -3062,7 +3062,7 @@ test("metadata", async (context) => {
},
});
- const graphqlSchema = buildGraphqlSchema(schema);
+ const graphqlSchema = buildGraphQLSchema(schema);
const document = parse(`
query {
diff --git a/packages/core/src/server/graphql/buildGraphqlSchema.ts b/packages/core/src/graphql/buildGraphqlSchema.ts
similarity index 97%
rename from packages/core/src/server/graphql/buildGraphqlSchema.ts
rename to packages/core/src/graphql/buildGraphqlSchema.ts
index 87978df62..1cc918e9b 100644
--- a/packages/core/src/server/graphql/buildGraphqlSchema.ts
+++ b/packages/core/src/graphql/buildGraphqlSchema.ts
@@ -22,7 +22,7 @@ export type Context = {
metadataStore: MetadataStore;
};
-export const buildGraphqlSchema = (schema: Schema): GraphQLSchema => {
+export const buildGraphQLSchema = (schema: Schema): GraphQLSchema => {
 const queryFields: Record<string, GraphQLFieldConfig<Parent, Context>> = {};
const { enumTypes } = buildEnumTypes({ schema });
diff --git a/packages/core/src/server/graphql/buildLoaderCache.ts b/packages/core/src/graphql/buildLoaderCache.ts
similarity index 100%
rename from packages/core/src/server/graphql/buildLoaderCache.ts
rename to packages/core/src/graphql/buildLoaderCache.ts
diff --git a/packages/core/src/server/graphql/entity.ts b/packages/core/src/graphql/entity.ts
similarity index 99%
rename from packages/core/src/server/graphql/entity.ts
rename to packages/core/src/graphql/entity.ts
index 351779059..ef5697248 100644
--- a/packages/core/src/server/graphql/entity.ts
+++ b/packages/core/src/graphql/entity.ts
@@ -23,9 +23,9 @@ import {
GraphQLObjectType,
GraphQLString,
} from "graphql";
-import { GraphQLJSON } from "graphql-type-json";
import type { Context, Parent } from "./buildGraphqlSchema.js";
import { buildWhereObject } from "./filter.js";
+import { GraphQLJSON } from "./graphQLJson.js";
import type { PluralResolver } from "./plural.js";
import { SCALARS } from "./scalar.js";
diff --git a/packages/core/src/server/graphql/enum.ts b/packages/core/src/graphql/enum.ts
similarity index 100%
rename from packages/core/src/server/graphql/enum.ts
rename to packages/core/src/graphql/enum.ts
diff --git a/packages/core/src/server/graphql/filter.test.ts b/packages/core/src/graphql/filter.test.ts
similarity index 100%
rename from packages/core/src/server/graphql/filter.test.ts
rename to packages/core/src/graphql/filter.test.ts
diff --git a/packages/core/src/server/graphql/filter.ts b/packages/core/src/graphql/filter.ts
similarity index 100%
rename from packages/core/src/server/graphql/filter.ts
rename to packages/core/src/graphql/filter.ts
diff --git a/packages/core/src/graphql/graphQLJson.ts b/packages/core/src/graphql/graphQLJson.ts
new file mode 100644
index 000000000..083d559dc
--- /dev/null
+++ b/packages/core/src/graphql/graphQLJson.ts
@@ -0,0 +1,63 @@
+import {
+ type GraphQLScalarLiteralParser,
+ GraphQLScalarType,
+ Kind,
+ type ObjectValueNode,
+ type ValueNode,
+ print,
+} from "graphql";
+
+// Modified from https://github.com/taion/graphql-type-json/blob/master/src/index.js
+
+export const GraphQLJSON = new GraphQLScalarType({
+ name: "JSON",
+ description:
+ "The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).",
+ serialize: (x) => x,
+ parseValue: (x) => x,
+ parseLiteral: (ast, variables) => {
+ if (ast.kind !== Kind.OBJECT) {
+ throw new TypeError(
+ `JSONObject cannot represent non-object value: ${print(ast)}`,
+ );
+ }
+
+ return parseObject(ast, variables);
+ },
+});
+
+const parseLiteral = (
+ ast: ValueNode,
+ variables: Parameters[1],
+): ReturnType> => {
+ switch (ast.kind) {
+ case Kind.STRING:
+ case Kind.BOOLEAN:
+ return ast.value;
+ case Kind.INT:
+ case Kind.FLOAT:
+ return Number.parseFloat(ast.value);
+ case Kind.OBJECT:
+ return parseObject(ast, variables);
+ case Kind.LIST:
+ return ast.values.map((n) => parseLiteral(n, variables));
+ case Kind.NULL:
+ return null;
+ case Kind.VARIABLE:
+ return variables ? variables[ast.name.value] : undefined;
+ default:
+ throw new TypeError(`JSON cannot represent value: ${print(ast)}`);
+ }
+};
+
+const parseObject = (
+ ast: ObjectValueNode,
+ variables: Parameters[1],
+) => {
+ const value = Object.create(null);
+ ast.fields.forEach((field) => {
+ value[field.name.value] = parseLiteral(field.value, variables);
+ });
+
+ return value;
+};
diff --git a/packages/core/src/graphql/index.test.ts b/packages/core/src/graphql/index.test.ts
new file mode 100644
index 000000000..a6e80ea11
--- /dev/null
+++ b/packages/core/src/graphql/index.test.ts
@@ -0,0 +1,316 @@
+import {
+ setupCommon,
+ setupDatabaseServices,
+ setupIsolatedDatabase,
+} from "@/_test/setup.js";
+import type { HistoricalStore, ReadonlyStore } from "@/indexing-store/store.js";
+import type { Schema } from "@/schema/common.js";
+import { createSchema } from "@/schema/schema.js";
+import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
+import { Hono } from "hono";
+import { createMiddleware } from "hono/factory";
+import { beforeEach, expect, test } from "vitest";
+import { graphql } from "./index.js";
+
+beforeEach(setupCommon);
+beforeEach(setupIsolatedDatabase);
+
+// Test helper: injects the schema and read-only store into the Hono
+// context, mirroring what the Ponder server middleware does in production.
+const contextMiddleware = (schema: Schema, readonlyStore: ReadonlyStore) =>
+  createMiddleware(async (c, next) => {
+    c.set("readonlyStore", readonlyStore);
+    c.set("schema", schema);
+    await next();
+  });
+
+// Happy path: a table with one column per scalar type is created, one row is
+// written and flushed, and a singular query round-trips every column.
+test("graphQLMiddleware serves request", async (context) => {
+  const schema = createSchema((p) => ({
+    table: p.createTable({
+      id: p.string(),
+      string: p.string(),
+      int: p.int(),
+      float: p.float(),
+      boolean: p.boolean(),
+      hex: p.hex(),
+      bigint: p.bigint(),
+    }),
+  }));
+
+  const { indexingStore, readonlyStore, cleanup } = await setupDatabaseServices(
+    context,
+    { schema },
+  );
+
+  await indexingStore.create({
+    tableName: "table",
+    encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
+    id: "0",
+    data: {
+      string: "0",
+      int: 0,
+      float: 0,
+      boolean: false,
+      hex: "0x0",
+      bigint: 0n,
+    },
+  });
+
+  // Flush the in-memory historical buffer so the readonly store can see it.
+  await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
+
+  const app = new Hono()
+    .use(contextMiddleware(schema, readonlyStore))
+    .use("/graphql", graphql());
+
+  const response = await app.request("/graphql", {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      query: `
+        query {
+          table(id: "0") {
+            id
+            string
+            int
+            float
+            boolean
+            hex
+            bigint
+          }
+        }
+      `,
+    }),
+  });
+
+  expect(response.status).toBe(200);
+
+  // Note the serialization rules: hex is normalized to even length
+  // ("0x0" -> "0x00") and bigint is returned as a string.
+  expect(await response.json()).toMatchObject({
+    data: {
+      table: {
+        id: "0",
+        string: "0",
+        int: 0,
+        float: 0,
+        boolean: false,
+        hex: "0x00",
+        bigint: "0",
+      },
+    },
+  });
+
+  await cleanup();
+});
+
+// Unknown arguments must be rejected by GraphQL validation; note the HTTP
+// status is still 200 — the error travels in the response body.
+test("graphQLMiddleware throws error when extra filter is applied", async (context) => {
+  const schema = createSchema((p) => ({
+    table: p.createTable({
+      id: p.string(),
+      string: p.string(),
+      int: p.int(),
+      float: p.float(),
+      boolean: p.boolean(),
+      hex: p.hex(),
+      bigint: p.bigint(),
+    }),
+  }));
+
+  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
+    schema,
+  });
+
+  const app = new Hono()
+    .use(contextMiddleware(schema, readonlyStore))
+    .use("/graphql", graphql());
+
+  const response = await app.request("/graphql", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({
+      query: `
+        {
+          table(id: "0", doesntExist: "kevin") {
+            id
+            string
+            int
+            float
+            boolean
+            hex
+            bigint
+          }
+        }
+      `,
+    }),
+  });
+
+  expect(response.status).toBe(200);
+  const body = await response.json();
+  expect(body.errors[0].message).toBe(
+    'Unknown argument "doesntExist" on field "Query.table".',
+  );
+
+  await cleanup();
+});
+
+// The maxTokensPlugin (graphql-armor) must reject operations exceeding the
+// configured token budget — here set artificially low to trigger it.
+test("graphQLMiddleware throws error for token limit", async (context) => {
+  const schema = createSchema((p) => ({
+    table: p.createTable({ id: p.string() }),
+  }));
+
+  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
+    schema,
+  });
+
+  const app = new Hono()
+    .use(contextMiddleware(schema, readonlyStore))
+    .use("/graphql", graphql({ maxOperationTokens: 3 }));
+
+  const response = await app.request("/graphql", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({
+      query: `
+        {
+          __schema {
+            types {
+              fields {
+                type {
+                  fields {
+                    type {
+                      description
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      `,
+    }),
+  });
+
+  expect(response.status).toBe(200);
+  const body = await response.json();
+  expect(body.errors[0].message).toBe(
+    "Syntax Error: Token limit of 3 exceeded.",
+  );
+
+  await cleanup();
+});
+
+// The maxDepthPlugin must reject deeply nested selections; introspection is
+// NOT exempt (ignoreIntrospection: false in the middleware), so this
+// 7-level-deep introspection query trips the limit of 5.
+test("graphQLMiddleware throws error for depth limit", async (context) => {
+  const schema = createSchema((p) => ({
+    table: p.createTable({ id: p.string() }),
+  }));
+
+  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
+    schema,
+  });
+
+  const app = new Hono()
+    .use(contextMiddleware(schema, readonlyStore))
+    .use("/graphql", graphql({ maxOperationDepth: 5 }));
+
+  const response = await app.request("/graphql", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({
+      query: `
+        {
+          __schema {
+            types {
+              fields {
+                type {
+                  fields {
+                    type {
+                      description
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      `,
+    }),
+  });
+
+  expect(response.status).toBe(200);
+  const body = await response.json();
+  expect(body.errors[0].message).toBe(
+    "Syntax Error: Query depth limit of 5 exceeded, found 7.",
+  );
+
+  await cleanup();
+});
+
+// The maxAliasesPlugin must reject operations using more aliases than
+// allowed — three aliases against a limit of two.
+test("graphQLMiddleware throws error for max aliases", async (context) => {
+  const schema = createSchema((p) => ({
+    table: p.createTable({ id: p.string() }),
+  }));
+
+  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
+    schema,
+  });
+
+  const app = new Hono()
+    .use(contextMiddleware(schema, readonlyStore))
+    .use("/graphql", graphql({ maxOperationAliases: 2 }));
+
+  const response = await app.request("/graphql", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({
+      query: `
+        {
+          __schema {
+            types {
+              fields {
+                type {
+                  alias1: fields {
+                    type {
+                      description
+                    }
+                  }
+                  alias2: fields {
+                    type {
+                      description
+                    }
+                  }
+                  alias3: fields {
+                    type {
+                      description
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      `,
+    }),
+  });
+
+  expect(response.status).toBe(200);
+  const body = await response.json();
+  expect(body.errors[0].message).toBe(
+    "Syntax Error: Aliases limit of 2 exceeded, found 3.",
+  );
+
+  await cleanup();
+});
+
+// A GET request (no POST body) must serve the interactive GraphiQL HTML
+// page rather than executing a GraphQL operation.
+test("graphQLMiddleware interactive", async (context) => {
+  const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
+    schema: {},
+  });
+
+  const app = new Hono()
+    .use(contextMiddleware({}, readonlyStore))
+    .use("/graphql", graphql({ maxOperationAliases: 2 }));
+
+  const response = await app.request("/graphql");
+
+  expect(response.status).toBe(200);
+
+  await cleanup();
+});
diff --git a/packages/core/src/graphql/index.ts b/packages/core/src/graphql/index.ts
new file mode 100644
index 000000000..bdabd7650
--- /dev/null
+++ b/packages/core/src/graphql/index.ts
@@ -0,0 +1,86 @@
+import { graphiQLHtml } from "@/ui/graphiql.html.js";
+import { maxAliasesPlugin } from "@escape.tech/graphql-armor-max-aliases";
+import { maxDepthPlugin } from "@escape.tech/graphql-armor-max-depth";
+import { maxTokensPlugin } from "@escape.tech/graphql-armor-max-tokens";
+import { type YogaServerInstance, createYoga } from "graphql-yoga";
+import { createMiddleware } from "hono/factory";
+import { buildGraphQLSchema } from "./buildGraphqlSchema.js";
+import { buildLoaderCache } from "./buildLoaderCache.js";
+
+/**
+ * Middleware for GraphQL with an interactive web view.
+ *
+ * - Docs: https://ponder.sh/docs/query/api-functions#register-graphql-middleware
+ *
+ * @example
+ * import { ponder } from "@/generated";
+ * import { graphql } from "@ponder/core";
+ *
+ * ponder.use("/graphql", graphql());
+ *
+ */
+/**
+ * Middleware for GraphQL with an interactive web view.
+ *
+ * - Docs: https://ponder.sh/docs/query/api-functions#register-graphql-middleware
+ *
+ * @example
+ * import { ponder } from "@/generated";
+ * import { graphql } from "@ponder/core";
+ *
+ * ponder.use("/graphql", graphql());
+ *
+ */
+export const graphql = (
+  {
+    // Default limits are from Apollo:
+    // https://www.apollographql.com/blog/prevent-graph-misuse-with-operation-size-and-complexity-limit
+    // Per-property destructuring defaults make a duplicated fallback object
+    // redundant (it previously repeated the same three values).
+    maxOperationTokens = 1000,
+    maxOperationDepth = 100,
+    maxOperationAliases = 30,
+  }: {
+    maxOperationTokens?: number;
+    maxOperationDepth?: number;
+    maxOperationAliases?: number;
+  } = {},
+) => {
+  // Created lazily on the first POST because the schema and stores are only
+  // available on the request context; cached for the middleware's lifetime.
+  let yoga: YogaServerInstance<any, any> | undefined = undefined;
+
+  return createMiddleware(async (c) => {
+    // GET serves the interactive GraphiQL page; POST executes operations.
+    if (c.req.method === "GET") {
+      return c.html(graphiQLHtml(c.req.path));
+    }
+
+    if (yoga === undefined) {
+      const readonlyStore = c.get("readonlyStore");
+      const metadataStore = c.get("metadataStore");
+      const schema = c.get("schema");
+      const graphqlSchema = buildGraphQLSchema(schema);
+
+      yoga = createYoga({
+        schema: graphqlSchema,
+        // A fresh loader cache per operation batches row lookups against the
+        // read-only store.
+        context: () => {
+          const getLoader = buildLoaderCache({ store: readonlyStore });
+          return { readonlyStore, metadataStore, getLoader };
+        },
+        graphqlEndpoint: c.req.path,
+        maskedErrors: process.env.NODE_ENV === "production",
+        logging: false,
+        graphiql: false,
+        parserAndValidationCache: false,
+        plugins: [
+          maxTokensPlugin({ n: maxOperationTokens }),
+          maxDepthPlugin({
+            n: maxOperationDepth,
+            ignoreIntrospection: false,
+          }),
+          maxAliasesPlugin({
+            n: maxOperationAliases,
+            allowList: [],
+          }),
+        ],
+      });
+    }
+
+    const response = await yoga.handle(c.req.raw);
+    // TODO: Figure out why Yoga is returning 500 status codes for GraphQL errors.
+    // @ts-expect-error
+    response.status = 200;
+    // @ts-expect-error
+    response.statusText = "OK";
+
+    return response;
+  });
+};
diff --git a/packages/core/src/server/graphql/metadata.ts b/packages/core/src/graphql/metadata.ts
similarity index 77%
rename from packages/core/src/server/graphql/metadata.ts
rename to packages/core/src/graphql/metadata.ts
index c6ba3e8d2..cdf527dbd 100644
--- a/packages/core/src/server/graphql/metadata.ts
+++ b/packages/core/src/graphql/metadata.ts
@@ -1,5 +1,5 @@
import { GraphQLObjectType } from "graphql";
-import { GraphQLJSON } from "graphql-type-json";
+import { GraphQLJSON } from "./graphQLJson.js";
export const metadataEntity = new GraphQLObjectType({
name: "_meta",
diff --git a/packages/core/src/server/graphql/plural.ts b/packages/core/src/graphql/plural.ts
similarity index 100%
rename from packages/core/src/server/graphql/plural.ts
rename to packages/core/src/graphql/plural.ts
diff --git a/packages/core/src/server/graphql/scalar.ts b/packages/core/src/graphql/scalar.ts
similarity index 100%
rename from packages/core/src/server/graphql/scalar.ts
rename to packages/core/src/graphql/scalar.ts
diff --git a/packages/core/src/server/graphql/singular.ts b/packages/core/src/graphql/singular.ts
similarity index 100%
rename from packages/core/src/server/graphql/singular.ts
rename to packages/core/src/graphql/singular.ts
diff --git a/packages/core/src/hono/context.ts b/packages/core/src/hono/context.ts
new file mode 100644
index 000000000..067b2dfc7
--- /dev/null
+++ b/packages/core/src/hono/context.ts
@@ -0,0 +1,52 @@
+import type { Schema } from "@/schema/common.js";
+import type { ApiContext } from "@/types/api.js";
+import type { Env, Context as HonoContext, Input } from "hono";
+
+// API-function context: the Ponder ApiContext plus a curated subset of the
+// Hono context surface. Restored the stripped generic arguments (the unused
+// `Env` import and the declared-but-unused type parameters confirm them) and
+// fixed `res`, which indexed `["req"]` despite documenting the response.
+export type Context<
+  schema extends Schema = Schema,
+  path extends string = string,
+  input extends Input = {},
+> = ApiContext<schema> & {
+  /**
+   * Hono request object.
+   *
+   * @see https://hono.dev/docs/api/context#req
+   */
+  req: HonoContext<Env, path, input>["req"];
+  /**
+   * Hono response object.
+   *
+   * @see https://hono.dev/docs/api/context#res
+   */
+  res: HonoContext<Env, path, input>["res"];
+  /**
+   * Return the HTTP response.
+   *
+   * @see https://hono.dev/docs/api/context#body
+   */
+  body: HonoContext<Env, path, input>["body"];
+  /**
+   * Render text as `Content-Type:text/plain`.
+   *
+   * @see https://hono.dev/docs/api/context#text
+   */
+  text: HonoContext<Env, path, input>["text"];
+  /**
+   * Render JSON as `Content-Type:application/json`.
+   *
+   * @see https://hono.dev/docs/api/context#json
+   */
+  json: HonoContext<Env, path, input>["json"];
+  /**
+   * Hono redirect.
+   *
+   * @see https://hono.dev/docs/api/context#redirect
+   */
+  redirect: HonoContext<Env, path, input>["redirect"];
+};
+
+// Middleware variant: exposes the FULL Hono context (not the curated subset)
+// alongside the Ponder ApiContext. Restored the stripped generic arguments,
+// without which the three declared type parameters were unused.
+export type MiddlewareContext<
+  schema extends Schema = Schema,
+  path extends string = string,
+  input extends Input = {},
+> = ApiContext<schema> & HonoContext<Env, path, input>;
diff --git a/packages/core/src/hono/handler.ts b/packages/core/src/hono/handler.ts
new file mode 100644
index 000000000..725a95764
--- /dev/null
+++ b/packages/core/src/hono/handler.ts
@@ -0,0 +1,760 @@
+import type { Schema } from "@/schema/common.js";
+import type { ApiRegistry } from "@/types/api.js";
+import type { BlankInput, HandlerResponse, Input, Next } from "hono/types";
+import type { Context, MiddlewareContext } from "./context.js";
+
+// A terminal route handler: receives the Ponder API context and returns the
+// response. Restored the stripped generic arguments — as written, all four
+// type parameters were unused and `HandlerResponse` (which requires a type
+// argument in Hono) appeared bare.
+export type Handler<
+  schema extends Schema = Schema,
+  path extends string = any,
+  input extends Input = BlankInput,
+  response extends HandlerResponse<any> = any,
+> = (c: Context<schema, path, input>) => response;
+
+// A middleware handler: receives the full middleware context plus `next`,
+// and either returns a Response or falls through (void). Restored the
+// stripped generics — `Promise` had no type argument and the three declared
+// type parameters were unused.
+export type MiddlewareHandler<
+  schema extends Schema = Schema,
+  path extends string = string,
+  input extends Input = {},
+> = (
+  c: MiddlewareContext<schema, path, input>,
+  next: Next,
+) => Promise<Response | void>;
+
+type BasePath = "/";
+
+export type HandlerInterface = {
+ // app.get(handler)
+ <
+ path extends string = BasePath,
+ input extends Input = BlankInput,
+ response extends HandlerResponse = any,
+ >(
+ handler: Handler,
+ ): ApiRegistry;
+
+ // app.get(handler x2)
+ <
+ path extends string = BasePath,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ response extends HandlerResponse = any,
+ >(
+ ...handlers: [
+ Handler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(path, handler)
+ <
+ path extends string,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ >(
+ path: path,
+ handler: Handler,
+ ): ApiRegistry;
+
+ // app.get(handler x 3)
+ <
+ path extends string = BasePath,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ >(
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(path, handler x2)
+ <
+ path extends string,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ >(
+ path: path,
+ ...handlers: [
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(handler x 4)
+ <
+ path extends string = BasePath,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ input4 extends Input = input & input2 & input3,
+ >(
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(path, handler x3)
+ <
+ path extends string,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ >(
+ path: path,
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(handler x 5)
+ <
+ path extends string = BasePath,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ input4 extends Input = input & input2 & input3,
+ input5 extends Input = input & input2 & input3 & input4,
+ >(
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(path, handler x4)
+ <
+ path extends string,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ input4 extends Input = input & input2 & input3,
+ >(
+ path: path,
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler,
+ ]
+ ): ApiRegistry;
+
+ // app.get(handler x 6)
+ <
+ path extends string = BasePath,
+ response extends HandlerResponse = any,
+ input extends Input = BlankInput,
+ input2 extends Input = input,
+ input3 extends Input = input & input2,
+ input4 extends Input = input & input2 & input3,
+ input5 extends Input = input & input2 & input3 & input4,
+ input6 extends Input = input & input2 & input3 & input4 & input5,
+ >(
+ ...handlers: [
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ MiddlewareHandler,
+ Handler